diff --git a/testbed/django__django/django/contrib/__init__.py b/testbed/django__django/django/contrib/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/contrib/sites/__init__.py b/testbed/django__django/django/contrib/sites/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/contrib/sites/admin.py b/testbed/django__django/django/contrib/sites/admin.py new file mode 100644 index 0000000000000000000000000000000000000000..53ad53d62225d8579314870801e8fee5c2513c77 --- /dev/null +++ b/testbed/django__django/django/contrib/sites/admin.py @@ -0,0 +1,8 @@ +from django.contrib import admin +from django.contrib.sites.models import Site + + +@admin.register(Site) +class SiteAdmin(admin.ModelAdmin): + list_display = ("domain", "name") + search_fields = ("domain", "name") diff --git a/testbed/django__django/django/contrib/sites/apps.py b/testbed/django__django/django/contrib/sites/apps.py new file mode 100644 index 0000000000000000000000000000000000000000..ac51a84e180b7c556833a31dec4929b39e38350d --- /dev/null +++ b/testbed/django__django/django/contrib/sites/apps.py @@ -0,0 +1,17 @@ +from django.apps import AppConfig +from django.contrib.sites.checks import check_site_id +from django.core import checks +from django.db.models.signals import post_migrate +from django.utils.translation import gettext_lazy as _ + +from .management import create_default_site + + +class SitesConfig(AppConfig): + default_auto_field = "django.db.models.AutoField" + name = "django.contrib.sites" + verbose_name = _("Sites") + + def ready(self): + post_migrate.connect(create_default_site, sender=self) + checks.register(check_site_id, checks.Tags.sites) diff --git a/testbed/django__django/django/contrib/sites/checks.py b/testbed/django__django/django/contrib/sites/checks.py new 
file mode 100644 index 0000000000000000000000000000000000000000..66e8551bed022b64dde6118dea8676b1355b4f1b --- /dev/null +++ b/testbed/django__django/django/contrib/sites/checks.py @@ -0,0 +1,14 @@ +from types import NoneType + +from django.conf import settings +from django.core.checks import Error + + +def check_site_id(app_configs, **kwargs): + if hasattr(settings, "SITE_ID") and not isinstance( + settings.SITE_ID, (NoneType, int) + ): + return [ + Error("The SITE_ID setting must be an integer", id="sites.E101"), + ] + return [] diff --git a/testbed/django__django/django/contrib/sites/locale/ckb/LC_MESSAGES/django.mo b/testbed/django__django/django/contrib/sites/locale/ckb/LC_MESSAGES/django.mo new file mode 100644 index 0000000000000000000000000000000000000000..9ae5e760d1a43680144e621c8194b09e74f9b15f Binary files /dev/null and b/testbed/django__django/django/contrib/sites/locale/ckb/LC_MESSAGES/django.mo differ diff --git a/testbed/django__django/django/contrib/sites/locale/ckb/LC_MESSAGES/django.po b/testbed/django__django/django/contrib/sites/locale/ckb/LC_MESSAGES/django.po new file mode 100644 index 0000000000000000000000000000000000000000..aeaca57df1d788be62c92b2bda7e63a9a312eb1b --- /dev/null +++ b/testbed/django__django/django/contrib/sites/locale/ckb/LC_MESSAGES/django.po @@ -0,0 +1,36 @@ +# This file is distributed under the same license as the Django package. 
+# +# Translators: +# kosar tofiq , 2020 +msgid "" +msgstr "" +"Project-Id-Version: django\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2015-01-17 11:07+0100\n" +"PO-Revision-Date: 2023-04-24 18:05+0000\n" +"Last-Translator: kosar tofiq , 2020\n" +"Language-Team: Central Kurdish (http://www.transifex.com/django/django/" +"language/ckb/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ckb\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Sites" +msgstr "ماڵپەڕەکان" + +msgid "The domain name cannot contain any spaces or tabs." +msgstr "ناوی دۆمەین نابێت بۆشایی یان تابی تێدابێت." + +msgid "domain name" +msgstr "ناوی دۆمەین" + +msgid "display name" +msgstr "ناوی پیشاندان" + +msgid "site" +msgstr "ماڵپەڕ" + +msgid "sites" +msgstr "ماڵپەڕەکان" diff --git a/testbed/django__django/django/contrib/sites/locale/te/LC_MESSAGES/django.mo b/testbed/django__django/django/contrib/sites/locale/te/LC_MESSAGES/django.mo new file mode 100644 index 0000000000000000000000000000000000000000..9be611dfde741b1038b00badd4eb6934ed7a7fc8 Binary files /dev/null and b/testbed/django__django/django/contrib/sites/locale/te/LC_MESSAGES/django.mo differ diff --git a/testbed/django__django/django/contrib/sites/locale/tr/LC_MESSAGES/django.mo b/testbed/django__django/django/contrib/sites/locale/tr/LC_MESSAGES/django.mo new file mode 100644 index 0000000000000000000000000000000000000000..b0778139166e720434a787f1398a8994e07bc3c9 Binary files /dev/null and b/testbed/django__django/django/contrib/sites/locale/tr/LC_MESSAGES/django.mo differ diff --git a/testbed/django__django/django/contrib/sites/locale/tr/LC_MESSAGES/django.po b/testbed/django__django/django/contrib/sites/locale/tr/LC_MESSAGES/django.po new file mode 100644 index 0000000000000000000000000000000000000000..2372c1463247b25b44654ef3a55b08d8b0a06bf2 --- /dev/null +++ b/testbed/django__django/django/contrib/sites/locale/tr/LC_MESSAGES/django.po @@ -0,0 
+1,39 @@ +# This file is distributed under the same license as the Django package. +# +# Translators: +# Ahmet Emre Aladağ , 2014 +# BouRock, 2014 +# Caner Başaran , 2013 +# Jannis Leidel , 2011 +msgid "" +msgstr "" +"Project-Id-Version: django\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2015-01-17 11:07+0100\n" +"PO-Revision-Date: 2017-09-23 18:54+0000\n" +"Last-Translator: Jannis Leidel \n" +"Language-Team: Turkish (http://www.transifex.com/django/django/language/" +"tr/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: tr\n" +"Plural-Forms: nplurals=2; plural=(n > 1);\n" + +msgid "Sites" +msgstr "Siteler" + +msgid "The domain name cannot contain any spaces or tabs." +msgstr "Etki alanı adı, herhangi bir boşluk ya da sekme içeremez." + +msgid "domain name" +msgstr "etki alanı adı" + +msgid "display name" +msgstr "görünen isim" + +msgid "site" +msgstr "site" + +msgid "sites" +msgstr "siteler" diff --git a/testbed/django__django/django/contrib/sites/locale/tt/LC_MESSAGES/django.mo b/testbed/django__django/django/contrib/sites/locale/tt/LC_MESSAGES/django.mo new file mode 100644 index 0000000000000000000000000000000000000000..384d79a9ab5c1feb2313bbd70ceb3b827b801da3 Binary files /dev/null and b/testbed/django__django/django/contrib/sites/locale/tt/LC_MESSAGES/django.mo differ diff --git a/testbed/django__django/django/contrib/sites/locale/udm/LC_MESSAGES/django.mo b/testbed/django__django/django/contrib/sites/locale/udm/LC_MESSAGES/django.mo new file mode 100644 index 0000000000000000000000000000000000000000..af7ab53bb6735a670114ecf738e973e4f5831251 Binary files /dev/null and b/testbed/django__django/django/contrib/sites/locale/udm/LC_MESSAGES/django.mo differ diff --git a/testbed/django__django/django/contrib/sites/locale/udm/LC_MESSAGES/django.po b/testbed/django__django/django/contrib/sites/locale/udm/LC_MESSAGES/django.po new file mode 100644 index 
0000000000000000000000000000000000000000..26b17e7b5bf43a6f9eced82c1d5423b878817d12 --- /dev/null +++ b/testbed/django__django/django/contrib/sites/locale/udm/LC_MESSAGES/django.po @@ -0,0 +1,35 @@ +# This file is distributed under the same license as the Django package. +# +# Translators: +msgid "" +msgstr "" +"Project-Id-Version: django\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2015-01-17 11:07+0100\n" +"PO-Revision-Date: 2014-10-05 20:13+0000\n" +"Last-Translator: Jannis Leidel \n" +"Language-Team: Udmurt (http://www.transifex.com/projects/p/django/language/" +"udm/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: udm\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +msgid "Sites" +msgstr "" + +msgid "The domain name cannot contain any spaces or tabs." +msgstr "" + +msgid "domain name" +msgstr "" + +msgid "display name" +msgstr "" + +msgid "site" +msgstr "" + +msgid "sites" +msgstr "" diff --git a/testbed/django__django/django/contrib/sites/locale/ur/LC_MESSAGES/django.mo b/testbed/django__django/django/contrib/sites/locale/ur/LC_MESSAGES/django.mo new file mode 100644 index 0000000000000000000000000000000000000000..733aebd7bfef635e9d9f269dca428d4526b5fcbc Binary files /dev/null and b/testbed/django__django/django/contrib/sites/locale/ur/LC_MESSAGES/django.mo differ diff --git a/testbed/django__django/django/contrib/sites/locale/ur/LC_MESSAGES/django.po b/testbed/django__django/django/contrib/sites/locale/ur/LC_MESSAGES/django.po new file mode 100644 index 0000000000000000000000000000000000000000..b16ddb248721a6cfb23b32487210a7c6f95844c4 --- /dev/null +++ b/testbed/django__django/django/contrib/sites/locale/ur/LC_MESSAGES/django.po @@ -0,0 +1,35 @@ +# This file is distributed under the same license as the Django package. 
+# +# Translators: +# Mansoorulhaq Mansoor , 2011 +msgid "" +msgstr "" +"Project-Id-Version: django\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2015-01-17 11:07+0100\n" +"PO-Revision-Date: 2017-09-19 16:40+0000\n" +"Last-Translator: Jannis Leidel \n" +"Language-Team: Urdu (http://www.transifex.com/django/django/language/ur/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: ur\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +msgid "Sites" +msgstr "" + +msgid "The domain name cannot contain any spaces or tabs." +msgstr "" + +msgid "domain name" +msgstr "ڈومین کا نام" + +msgid "display name" +msgstr "ظاھر ھونے والا نام" + +msgid "site" +msgstr "سائٹ" + +msgid "sites" +msgstr "سائٹس" diff --git a/testbed/django__django/django/contrib/sites/locale/uz/LC_MESSAGES/django.mo b/testbed/django__django/django/contrib/sites/locale/uz/LC_MESSAGES/django.mo new file mode 100644 index 0000000000000000000000000000000000000000..f1c68e3283c417e7c985d650d85756b2ab30d5da Binary files /dev/null and b/testbed/django__django/django/contrib/sites/locale/uz/LC_MESSAGES/django.mo differ diff --git a/testbed/django__django/django/contrib/sites/locale/uz/LC_MESSAGES/django.po b/testbed/django__django/django/contrib/sites/locale/uz/LC_MESSAGES/django.po new file mode 100644 index 0000000000000000000000000000000000000000..4f26906e91d649cc8c5061867fdc1f6422cbdc39 --- /dev/null +++ b/testbed/django__django/django/contrib/sites/locale/uz/LC_MESSAGES/django.po @@ -0,0 +1,35 @@ +# This file is distributed under the same license as the Django package. 
+# +# Translators: +# Nuruddin Iminokhunov , 2016 +msgid "" +msgstr "" +"Project-Id-Version: django\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2015-01-17 11:07+0100\n" +"PO-Revision-Date: 2017-09-23 01:18+0000\n" +"Last-Translator: Nuruddin Iminokhunov \n" +"Language-Team: Uzbek (http://www.transifex.com/django/django/language/uz/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: uz\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +msgid "Sites" +msgstr "Saytlar" + +msgid "The domain name cannot contain any spaces or tabs." +msgstr "Domen ismi tab`lar va bo'shliqlarsiz bo'lishi kerak" + +msgid "domain name" +msgstr "domen nomi" + +msgid "display name" +msgstr "ko'rsatiladigan ismi" + +msgid "site" +msgstr "sayt" + +msgid "sites" +msgstr "saytlar" diff --git a/testbed/django__django/django/contrib/sites/locale/vi/LC_MESSAGES/django.mo b/testbed/django__django/django/contrib/sites/locale/vi/LC_MESSAGES/django.mo new file mode 100644 index 0000000000000000000000000000000000000000..36fa0755f7903ef1d48e25bd69289c7cbcb630f6 Binary files /dev/null and b/testbed/django__django/django/contrib/sites/locale/vi/LC_MESSAGES/django.mo differ diff --git a/testbed/django__django/django/contrib/sites/locale/vi/LC_MESSAGES/django.po b/testbed/django__django/django/contrib/sites/locale/vi/LC_MESSAGES/django.po new file mode 100644 index 0000000000000000000000000000000000000000..6f7d6a01a8f05f52b1f67f12989a60fae0ae6cb6 --- /dev/null +++ b/testbed/django__django/django/contrib/sites/locale/vi/LC_MESSAGES/django.po @@ -0,0 +1,38 @@ +# This file is distributed under the same license as the Django package. 
+# +# Translators: +# Jannis Leidel , 2011 +# Thanh Le Viet , 2013 +# Tran , 2011 +msgid "" +msgstr "" +"Project-Id-Version: django\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2015-01-17 11:07+0100\n" +"PO-Revision-Date: 2017-09-23 18:54+0000\n" +"Last-Translator: Tran Van \n" +"Language-Team: Vietnamese (http://www.transifex.com/django/django/language/" +"vi/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: vi\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +msgid "Sites" +msgstr "" + +msgid "The domain name cannot contain any spaces or tabs." +msgstr "Tên miền không gồm kí tự trống hoặc tab" + +msgid "domain name" +msgstr "Tên miền" + +msgid "display name" +msgstr "Tên hiển thị" + +msgid "site" +msgstr "trang web" + +msgid "sites" +msgstr "các trang web" diff --git a/testbed/django__django/django/contrib/sites/locale/zh_Hans/LC_MESSAGES/django.mo b/testbed/django__django/django/contrib/sites/locale/zh_Hans/LC_MESSAGES/django.mo new file mode 100644 index 0000000000000000000000000000000000000000..028280ce343e717a76c42128ca4d3769cf7e904c Binary files /dev/null and b/testbed/django__django/django/contrib/sites/locale/zh_Hans/LC_MESSAGES/django.mo differ diff --git a/testbed/django__django/django/contrib/sites/locale/zh_Hans/LC_MESSAGES/django.po b/testbed/django__django/django/contrib/sites/locale/zh_Hans/LC_MESSAGES/django.po new file mode 100644 index 0000000000000000000000000000000000000000..82f53faeedef2b0bc2e355e4f5e42439035f50b0 --- /dev/null +++ b/testbed/django__django/django/contrib/sites/locale/zh_Hans/LC_MESSAGES/django.po @@ -0,0 +1,37 @@ +# This file is distributed under the same license as the Django package. 
+# +# Translators: +# Jannis Leidel , 2011 +# Ronald White , 2014 +msgid "" +msgstr "" +"Project-Id-Version: django\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2015-01-17 11:07+0100\n" +"PO-Revision-Date: 2017-09-19 16:40+0000\n" +"Last-Translator: Jannis Leidel \n" +"Language-Team: Chinese (China) (http://www.transifex.com/django/django/" +"language/zh_CN/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: zh_CN\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +msgid "Sites" +msgstr "站点" + +msgid "The domain name cannot contain any spaces or tabs." +msgstr "域名不能包含任何空格或制表符。" + +msgid "domain name" +msgstr "域名" + +msgid "display name" +msgstr "显示名称" + +msgid "site" +msgstr "站点" + +msgid "sites" +msgstr "站点" diff --git a/testbed/django__django/django/contrib/sites/locale/zh_Hant/LC_MESSAGES/django.mo b/testbed/django__django/django/contrib/sites/locale/zh_Hant/LC_MESSAGES/django.mo new file mode 100644 index 0000000000000000000000000000000000000000..f1c3d67b36ab955e95d10a730f0ebeb859363d8d Binary files /dev/null and b/testbed/django__django/django/contrib/sites/locale/zh_Hant/LC_MESSAGES/django.mo differ diff --git a/testbed/django__django/django/contrib/sites/locale/zh_Hant/LC_MESSAGES/django.po b/testbed/django__django/django/contrib/sites/locale/zh_Hant/LC_MESSAGES/django.po new file mode 100644 index 0000000000000000000000000000000000000000..d1a5583f8cde78f327cad686afbed490a6d4279e --- /dev/null +++ b/testbed/django__django/django/contrib/sites/locale/zh_Hant/LC_MESSAGES/django.po @@ -0,0 +1,39 @@ +# This file is distributed under the same license as the Django package. 
+# +# Translators: +# Chen Chun-Chia , 2015 +# Jannis Leidel , 2011 +# mail6543210 , 2013 +# Tzu-ping Chung , 2016 +msgid "" +msgstr "" +"Project-Id-Version: django\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2015-01-17 11:07+0100\n" +"PO-Revision-Date: 2017-09-19 16:40+0000\n" +"Last-Translator: Tzu-ping Chung \n" +"Language-Team: Chinese (Taiwan) (http://www.transifex.com/django/django/" +"language/zh_TW/)\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Language: zh_TW\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +msgid "Sites" +msgstr "網站" + +msgid "The domain name cannot contain any spaces or tabs." +msgstr "網域名稱不能包含空格或定位字元。" + +msgid "domain name" +msgstr "網域名稱" + +msgid "display name" +msgstr "顯示名稱" + +msgid "site" +msgstr "網站" + +msgid "sites" +msgstr "網站" diff --git a/testbed/django__django/django/contrib/sites/management.py b/testbed/django__django/django/contrib/sites/management.py new file mode 100644 index 0000000000000000000000000000000000000000..dd75bc1ba96a246db7dfadf2dad0b836e14d4e87 --- /dev/null +++ b/testbed/django__django/django/contrib/sites/management.py @@ -0,0 +1,47 @@ +""" +Creates the default Site object. +""" + +from django.apps import apps as global_apps +from django.conf import settings +from django.core.management.color import no_style +from django.db import DEFAULT_DB_ALIAS, connections, router + + +def create_default_site( + app_config, + verbosity=2, + interactive=True, + using=DEFAULT_DB_ALIAS, + apps=global_apps, + **kwargs, +): + try: + Site = apps.get_model("sites", "Site") + except LookupError: + return + + if not router.allow_migrate_model(using, Site): + return + + if not Site.objects.using(using).exists(): + # The default settings set SITE_ID = 1, and some tests in Django's test + # suite rely on this value. However, if database sequences are reused + # (e.g. 
in the test suite after flush/syncdb), it isn't guaranteed that + # the next id will be 1, so we coerce it. See #15573 and #16353. This + # can also crop up outside of tests - see #15346. + if verbosity >= 2: + print("Creating example.com Site object") + Site( + pk=getattr(settings, "SITE_ID", 1), domain="example.com", name="example.com" + ).save(using=using) + + # We set an explicit pk instead of relying on auto-incrementation, + # so we need to reset the database sequence. See #17415. + sequence_sql = connections[using].ops.sequence_reset_sql(no_style(), [Site]) + if sequence_sql: + if verbosity >= 2: + print("Resetting sequence") + with connections[using].cursor() as cursor: + for command in sequence_sql: + cursor.execute(command) diff --git a/testbed/django__django/django/contrib/sites/managers.py b/testbed/django__django/django/contrib/sites/managers.py new file mode 100644 index 0000000000000000000000000000000000000000..ec2882656053487e8c5c1aec79f521a8da02da97 --- /dev/null +++ b/testbed/django__django/django/contrib/sites/managers.py @@ -0,0 +1,65 @@ +from django.conf import settings +from django.core import checks +from django.core.exceptions import FieldDoesNotExist +from django.db import models + + +class CurrentSiteManager(models.Manager): + "Use this to limit objects to those associated with the current site." + + use_in_migrations = True + + def __init__(self, field_name=None): + super().__init__() + self.__field_name = field_name + + def check(self, **kwargs): + errors = super().check(**kwargs) + errors.extend(self._check_field_name()) + return errors + + def _check_field_name(self): + field_name = self._get_field_name() + try: + field = self.model._meta.get_field(field_name) + except FieldDoesNotExist: + return [ + checks.Error( + "CurrentSiteManager could not find a field named '%s'." 
+ % field_name, + obj=self, + id="sites.E001", + ) + ] + + if not field.many_to_many and not isinstance(field, (models.ForeignKey)): + return [ + checks.Error( + "CurrentSiteManager cannot use '%s.%s' as it is not a foreign key " + "or a many-to-many field." + % (self.model._meta.object_name, field_name), + obj=self, + id="sites.E002", + ) + ] + + return [] + + def _get_field_name(self): + """Return self.__field_name or 'site' or 'sites'.""" + + if not self.__field_name: + try: + self.model._meta.get_field("site") + except FieldDoesNotExist: + self.__field_name = "sites" + else: + self.__field_name = "site" + return self.__field_name + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(**{self._get_field_name() + "__id": settings.SITE_ID}) + ) diff --git a/testbed/django__django/django/contrib/sites/middleware.py b/testbed/django__django/django/contrib/sites/middleware.py new file mode 100644 index 0000000000000000000000000000000000000000..bc3bf20c48b5b2e59b5a2482d2cc35037e67c30f --- /dev/null +++ b/testbed/django__django/django/contrib/sites/middleware.py @@ -0,0 +1,12 @@ +from django.utils.deprecation import MiddlewareMixin + +from .shortcuts import get_current_site + + +class CurrentSiteMiddleware(MiddlewareMixin): + """ + Middleware that sets `site` attribute to request object. 
+ """ + + def process_request(self, request): + request.site = get_current_site(request) diff --git a/testbed/django__django/django/contrib/sites/migrations/0001_initial.py b/testbed/django__django/django/contrib/sites/migrations/0001_initial.py new file mode 100644 index 0000000000000000000000000000000000000000..a23f0f129b1dc6c7b746a78f453261c5d6a8c5fb --- /dev/null +++ b/testbed/django__django/django/contrib/sites/migrations/0001_initial.py @@ -0,0 +1,43 @@ +import django.contrib.sites.models +from django.contrib.sites.models import _simple_domain_name_validator +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [] + + operations = [ + migrations.CreateModel( + name="Site", + fields=[ + ( + "id", + models.AutoField( + verbose_name="ID", + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ( + "domain", + models.CharField( + max_length=100, + verbose_name="domain name", + validators=[_simple_domain_name_validator], + ), + ), + ("name", models.CharField(max_length=50, verbose_name="display name")), + ], + options={ + "ordering": ["domain"], + "db_table": "django_site", + "verbose_name": "site", + "verbose_name_plural": "sites", + }, + bases=(models.Model,), + managers=[ + ("objects", django.contrib.sites.models.SiteManager()), + ], + ), + ] diff --git a/testbed/django__django/django/contrib/sites/migrations/0002_alter_domain_unique.py b/testbed/django__django/django/contrib/sites/migrations/0002_alter_domain_unique.py new file mode 100644 index 0000000000000000000000000000000000000000..e379a91f67efcdf7440b6f519749f6ac3c6c1639 --- /dev/null +++ b/testbed/django__django/django/contrib/sites/migrations/0002_alter_domain_unique.py @@ -0,0 +1,21 @@ +import django.contrib.sites.models +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("sites", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="site", + name="domain", 
+ field=models.CharField( + max_length=100, + unique=True, + validators=[django.contrib.sites.models._simple_domain_name_validator], + verbose_name="domain name", + ), + ), + ] diff --git a/testbed/django__django/django/contrib/sites/migrations/__init__.py b/testbed/django__django/django/contrib/sites/migrations/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/contrib/sites/models.py b/testbed/django__django/django/contrib/sites/models.py new file mode 100644 index 0000000000000000000000000000000000000000..7f8bce920e7ab0a44b1cdac3bf07cf85f4da536c --- /dev/null +++ b/testbed/django__django/django/contrib/sites/models.py @@ -0,0 +1,120 @@ +import string + +from django.core.exceptions import ImproperlyConfigured, ValidationError +from django.db import models +from django.db.models.signals import pre_delete, pre_save +from django.http.request import split_domain_port +from django.utils.translation import gettext_lazy as _ + +SITE_CACHE = {} + + +def _simple_domain_name_validator(value): + """ + Validate that the given value contains no whitespaces to prevent common + typos. + """ + checks = ((s in value) for s in string.whitespace) + if any(checks): + raise ValidationError( + _("The domain name cannot contain any spaces or tabs."), + code="invalid", + ) + + +class SiteManager(models.Manager): + use_in_migrations = True + + def _get_site_by_id(self, site_id): + if site_id not in SITE_CACHE: + site = self.get(pk=site_id) + SITE_CACHE[site_id] = site + return SITE_CACHE[site_id] + + def _get_site_by_request(self, request): + host = request.get_host() + try: + # First attempt to look up the site by host with or without port. + if host not in SITE_CACHE: + SITE_CACHE[host] = self.get(domain__iexact=host) + return SITE_CACHE[host] + except Site.DoesNotExist: + # Fallback to looking up site after stripping port from the host. 
+ domain, port = split_domain_port(host) + if domain not in SITE_CACHE: + SITE_CACHE[domain] = self.get(domain__iexact=domain) + return SITE_CACHE[domain] + + def get_current(self, request=None): + """ + Return the current Site based on the SITE_ID in the project's settings. + If SITE_ID isn't defined, return the site with domain matching + request.get_host(). The ``Site`` object is cached the first time it's + retrieved from the database. + """ + from django.conf import settings + + if getattr(settings, "SITE_ID", ""): + site_id = settings.SITE_ID + return self._get_site_by_id(site_id) + elif request: + return self._get_site_by_request(request) + + raise ImproperlyConfigured( + 'You\'re using the Django "sites framework" without having ' + "set the SITE_ID setting. Create a site in your database and " + "set the SITE_ID setting or pass a request to " + "Site.objects.get_current() to fix this error." + ) + + def clear_cache(self): + """Clear the ``Site`` object cache.""" + global SITE_CACHE + SITE_CACHE = {} + + def get_by_natural_key(self, domain): + return self.get(domain=domain) + + +class Site(models.Model): + domain = models.CharField( + _("domain name"), + max_length=100, + validators=[_simple_domain_name_validator], + unique=True, + ) + name = models.CharField(_("display name"), max_length=50) + + objects = SiteManager() + + class Meta: + db_table = "django_site" + verbose_name = _("site") + verbose_name_plural = _("sites") + ordering = ["domain"] + + def __str__(self): + return self.domain + + def natural_key(self): + return (self.domain,) + + +def clear_site_cache(sender, **kwargs): + """ + Clear the cache (if primed) each time a site is saved or deleted. 
+ """ + instance = kwargs["instance"] + using = kwargs["using"] + try: + del SITE_CACHE[instance.pk] + except KeyError: + pass + try: + del SITE_CACHE[Site.objects.using(using).get(pk=instance.pk).domain] + except (KeyError, Site.DoesNotExist): + pass + + +pre_save.connect(clear_site_cache, sender=Site) +pre_delete.connect(clear_site_cache, sender=Site) diff --git a/testbed/django__django/django/contrib/sites/requests.py b/testbed/django__django/django/contrib/sites/requests.py new file mode 100644 index 0000000000000000000000000000000000000000..a0c9c18aa379e2501c71cffa6d40fcaf77de01f5 --- /dev/null +++ b/testbed/django__django/django/contrib/sites/requests.py @@ -0,0 +1,20 @@ +class RequestSite: + """ + A class that shares the primary interface of Site (i.e., it has ``domain`` + and ``name`` attributes) but gets its data from an HttpRequest object + rather than from a database. + + The save() and delete() methods raise NotImplementedError. + """ + + def __init__(self, request): + self.domain = self.name = request.get_host() + + def __str__(self): + return self.domain + + def save(self, force_insert=False, force_update=False): + raise NotImplementedError("RequestSite cannot be saved.") + + def delete(self): + raise NotImplementedError("RequestSite cannot be deleted.") diff --git a/testbed/django__django/django/contrib/sites/shortcuts.py b/testbed/django__django/django/contrib/sites/shortcuts.py new file mode 100644 index 0000000000000000000000000000000000000000..d4b65101afdc336869e3dde468bdeec6d2137aa6 --- /dev/null +++ b/testbed/django__django/django/contrib/sites/shortcuts.py @@ -0,0 +1,18 @@ +from django.apps import apps + +from .requests import RequestSite + + +def get_current_site(request): + """ + Check if contrib.sites is installed and return either the current + ``Site`` object or a ``RequestSite`` object based on the request. 
+ """ + # Import is inside the function because its point is to avoid importing the + # Site models when django.contrib.sites isn't installed. + if apps.is_installed("django.contrib.sites"): + from .models import Site + + return Site.objects.get_current(request) + else: + return RequestSite(request) diff --git a/testbed/django__django/django/contrib/staticfiles/__init__.py b/testbed/django__django/django/contrib/staticfiles/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/contrib/staticfiles/apps.py b/testbed/django__django/django/contrib/staticfiles/apps.py new file mode 100644 index 0000000000000000000000000000000000000000..67acf042aa1bd03e9c03ea5922c2e581a1354006 --- /dev/null +++ b/testbed/django__django/django/contrib/staticfiles/apps.py @@ -0,0 +1,13 @@ +from django.apps import AppConfig +from django.contrib.staticfiles.checks import check_finders +from django.core import checks +from django.utils.translation import gettext_lazy as _ + + +class StaticFilesConfig(AppConfig): + name = "django.contrib.staticfiles" + verbose_name = _("Static Files") + ignore_patterns = ["CVS", ".*", "*~"] + + def ready(self): + checks.register(check_finders, checks.Tags.staticfiles) diff --git a/testbed/django__django/django/contrib/staticfiles/checks.py b/testbed/django__django/django/contrib/staticfiles/checks.py new file mode 100644 index 0000000000000000000000000000000000000000..fb57bf726db357d2d5cfeb1444248b6f1e66d15c --- /dev/null +++ b/testbed/django__django/django/contrib/staticfiles/checks.py @@ -0,0 +1,14 @@ +from django.contrib.staticfiles.finders import get_finders + + +def check_finders(app_configs=None, **kwargs): + """Check all registered staticfiles finders.""" + errors = [] + for finder in get_finders(): + try: + finder_errors = finder.check() + except NotImplementedError: + pass + else: + errors.extend(finder_errors) + return errors diff --git 
a/testbed/django__django/django/contrib/staticfiles/finders.py b/testbed/django__django/django/contrib/staticfiles/finders.py new file mode 100644 index 0000000000000000000000000000000000000000..112a81d279d3a6ffa89edb848443fc6b0d4eb989 --- /dev/null +++ b/testbed/django__django/django/contrib/staticfiles/finders.py @@ -0,0 +1,326 @@ +import functools +import os + +from django.apps import apps +from django.conf import settings +from django.contrib.staticfiles import utils +from django.core.checks import Error, Warning +from django.core.exceptions import ImproperlyConfigured +from django.core.files.storage import FileSystemStorage, Storage, default_storage +from django.utils._os import safe_join +from django.utils.functional import LazyObject, empty +from django.utils.module_loading import import_string + +# To keep track on which directories the finder has searched the static files. +searched_locations = [] + + +class BaseFinder: + """ + A base file finder to be used for custom staticfiles finder classes. + """ + + def check(self, **kwargs): + raise NotImplementedError( + "subclasses may provide a check() method to verify the finder is " + "configured correctly." + ) + + def find(self, path, all=False): + """ + Given a relative file path, find an absolute file path. + + If the ``all`` parameter is False (default) return only the first found + file path; if True, return a list of all found files paths. + """ + raise NotImplementedError( + "subclasses of BaseFinder must provide a find() method" + ) + + def list(self, ignore_patterns): + """ + Given an optional list of paths to ignore, return a two item iterable + consisting of the relative path and storage instance. + """ + raise NotImplementedError( + "subclasses of BaseFinder must provide a list() method" + ) + + +class FileSystemFinder(BaseFinder): + """ + A static files finder that uses the ``STATICFILES_DIRS`` setting + to locate files. 
+ """ + + def __init__(self, app_names=None, *args, **kwargs): + # List of locations with static files + self.locations = [] + # Maps dir paths to an appropriate storage instance + self.storages = {} + for root in settings.STATICFILES_DIRS: + if isinstance(root, (list, tuple)): + prefix, root = root + else: + prefix = "" + if (prefix, root) not in self.locations: + self.locations.append((prefix, root)) + for prefix, root in self.locations: + filesystem_storage = FileSystemStorage(location=root) + filesystem_storage.prefix = prefix + self.storages[root] = filesystem_storage + super().__init__(*args, **kwargs) + + def check(self, **kwargs): + errors = [] + if not isinstance(settings.STATICFILES_DIRS, (list, tuple)): + errors.append( + Error( + "The STATICFILES_DIRS setting is not a tuple or list.", + hint="Perhaps you forgot a trailing comma?", + id="staticfiles.E001", + ) + ) + return errors + for root in settings.STATICFILES_DIRS: + if isinstance(root, (list, tuple)): + prefix, root = root + if prefix.endswith("/"): + errors.append( + Error( + "The prefix %r in the STATICFILES_DIRS setting must " + "not end with a slash." % prefix, + id="staticfiles.E003", + ) + ) + if settings.STATIC_ROOT and os.path.abspath( + settings.STATIC_ROOT + ) == os.path.abspath(root): + errors.append( + Error( + "The STATICFILES_DIRS setting should not contain the " + "STATIC_ROOT setting.", + id="staticfiles.E002", + ) + ) + if not os.path.isdir(root): + errors.append( + Warning( + f"The directory '{root}' in the STATICFILES_DIRS setting " + f"does not exist.", + id="staticfiles.W004", + ) + ) + return errors + + def find(self, path, all=False): + """ + Look for files in the extra locations as defined in STATICFILES_DIRS. 
+ """ + matches = [] + for prefix, root in self.locations: + if root not in searched_locations: + searched_locations.append(root) + matched_path = self.find_location(root, path, prefix) + if matched_path: + if not all: + return matched_path + matches.append(matched_path) + return matches + + def find_location(self, root, path, prefix=None): + """ + Find a requested static file in a location and return the found + absolute path (or ``None`` if no match). + """ + if prefix: + prefix = "%s%s" % (prefix, os.sep) + if not path.startswith(prefix): + return None + path = path.removeprefix(prefix) + path = safe_join(root, path) + if os.path.exists(path): + return path + + def list(self, ignore_patterns): + """ + List all files in all locations. + """ + for prefix, root in self.locations: + # Skip nonexistent directories. + if os.path.isdir(root): + storage = self.storages[root] + for path in utils.get_files(storage, ignore_patterns): + yield path, storage + + +class AppDirectoriesFinder(BaseFinder): + """ + A static files finder that looks in the directory of each app as + specified in the source_dir attribute. + """ + + storage_class = FileSystemStorage + source_dir = "static" + + def __init__(self, app_names=None, *args, **kwargs): + # The list of apps that are handled + self.apps = [] + # Mapping of app names to storage instances + self.storages = {} + app_configs = apps.get_app_configs() + if app_names: + app_names = set(app_names) + app_configs = [ac for ac in app_configs if ac.name in app_names] + for app_config in app_configs: + app_storage = self.storage_class( + os.path.join(app_config.path, self.source_dir) + ) + if os.path.isdir(app_storage.location): + self.storages[app_config.name] = app_storage + if app_config.name not in self.apps: + self.apps.append(app_config.name) + super().__init__(*args, **kwargs) + + def list(self, ignore_patterns): + """ + List all files in all app storages. 
+ """ + for storage in self.storages.values(): + if storage.exists(""): # check if storage location exists + for path in utils.get_files(storage, ignore_patterns): + yield path, storage + + def find(self, path, all=False): + """ + Look for files in the app directories. + """ + matches = [] + for app in self.apps: + app_location = self.storages[app].location + if app_location not in searched_locations: + searched_locations.append(app_location) + match = self.find_in_app(app, path) + if match: + if not all: + return match + matches.append(match) + return matches + + def find_in_app(self, app, path): + """ + Find a requested static file in an app's static locations. + """ + storage = self.storages.get(app) + # Only try to find a file if the source dir actually exists. + if storage and storage.exists(path): + matched_path = storage.path(path) + if matched_path: + return matched_path + + +class BaseStorageFinder(BaseFinder): + """ + A base static files finder to be used to extended + with an own storage class. + """ + + storage = None + + def __init__(self, storage=None, *args, **kwargs): + if storage is not None: + self.storage = storage + if self.storage is None: + raise ImproperlyConfigured( + "The staticfiles storage finder %r " + "doesn't have a storage class " + "assigned." % self.__class__ + ) + # Make sure we have a storage instance here. + if not isinstance(self.storage, (Storage, LazyObject)): + self.storage = self.storage() + super().__init__(*args, **kwargs) + + def find(self, path, all=False): + """ + Look for files in the default file storage, if it's local. + """ + try: + self.storage.path("") + except NotImplementedError: + pass + else: + if self.storage.location not in searched_locations: + searched_locations.append(self.storage.location) + if self.storage.exists(path): + match = self.storage.path(path) + if all: + match = [match] + return match + return [] + + def list(self, ignore_patterns): + """ + List all files of the storage. 
+ """ + for path in utils.get_files(self.storage, ignore_patterns): + yield path, self.storage + + +class DefaultStorageFinder(BaseStorageFinder): + """ + A static files finder that uses the default storage backend. + """ + + storage = default_storage + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + base_location = getattr(self.storage, "base_location", empty) + if not base_location: + raise ImproperlyConfigured( + "The storage backend of the " + "staticfiles finder %r doesn't have " + "a valid location." % self.__class__ + ) + + +def find(path, all=False): + """ + Find a static file with the given path using all enabled finders. + + If ``all`` is ``False`` (default), return the first matching + absolute path (or ``None`` if no match). Otherwise return a list. + """ + searched_locations[:] = [] + matches = [] + for finder in get_finders(): + result = finder.find(path, all=all) + if not all and result: + return result + if not isinstance(result, (list, tuple)): + result = [result] + matches.extend(result) + if matches: + return matches + # No match. + return [] if all else None + + +def get_finders(): + for finder_path in settings.STATICFILES_FINDERS: + yield get_finder(finder_path) + + +@functools.cache +def get_finder(import_path): + """ + Import the staticfiles finder class described by import_path, where + import_path is the full Python path to the class. 
+ """ + Finder = import_string(import_path) + if not issubclass(Finder, BaseFinder): + raise ImproperlyConfigured( + 'Finder "%s" is not a subclass of "%s"' % (Finder, BaseFinder) + ) + return Finder() diff --git a/testbed/django__django/django/contrib/staticfiles/handlers.py b/testbed/django__django/django/contrib/staticfiles/handlers.py new file mode 100644 index 0000000000000000000000000000000000000000..7394eff81857be029e90956b4bb0bae8ed84b397 --- /dev/null +++ b/testbed/django__django/django/contrib/staticfiles/handlers.py @@ -0,0 +1,115 @@ +from urllib.parse import urlparse +from urllib.request import url2pathname + +from asgiref.sync import sync_to_async + +from django.conf import settings +from django.contrib.staticfiles import utils +from django.contrib.staticfiles.views import serve +from django.core.handlers.asgi import ASGIHandler +from django.core.handlers.exception import response_for_exception +from django.core.handlers.wsgi import WSGIHandler, get_path_info +from django.http import Http404 + + +class StaticFilesHandlerMixin: + """ + Common methods used by WSGI and ASGI handlers. + """ + + # May be used to differentiate between handler types (e.g. in a + # request_finished signal) + handles_files = True + + def load_middleware(self): + # Middleware are already loaded for self.application; no need to reload + # them for self. + pass + + def get_base_url(self): + utils.check_settings() + return settings.STATIC_URL + + def _should_handle(self, path): + """ + Check if the path should be handled. Ignore the path if: + * the host is provided as part of the base_url + * the request's path isn't under the media path (or equal) + """ + return path.startswith(self.base_url[2]) and not self.base_url[1] + + def file_path(self, url): + """ + Return the relative path to the media file on disk for the given URL. 
+ """ + relative_url = url.removeprefix(self.base_url[2]) + return url2pathname(relative_url) + + def serve(self, request): + """Serve the request path.""" + return serve(request, self.file_path(request.path), insecure=True) + + def get_response(self, request): + try: + return self.serve(request) + except Http404 as e: + return response_for_exception(request, e) + + async def get_response_async(self, request): + try: + return await sync_to_async(self.serve, thread_sensitive=False)(request) + except Http404 as e: + return await sync_to_async(response_for_exception, thread_sensitive=False)( + request, e + ) + + +class StaticFilesHandler(StaticFilesHandlerMixin, WSGIHandler): + """ + WSGI middleware that intercepts calls to the static files directory, as + defined by the STATIC_URL setting, and serves those files. + """ + + def __init__(self, application): + self.application = application + self.base_url = urlparse(self.get_base_url()) + super().__init__() + + def __call__(self, environ, start_response): + if not self._should_handle(get_path_info(environ)): + return self.application(environ, start_response) + return super().__call__(environ, start_response) + + +class ASGIStaticFilesHandler(StaticFilesHandlerMixin, ASGIHandler): + """ + ASGI application which wraps another and intercepts requests for static + files, passing them off to Django's static file serving. 
+ """ + + def __init__(self, application): + self.application = application + self.base_url = urlparse(self.get_base_url()) + + async def __call__(self, scope, receive, send): + # Only even look at HTTP requests + if scope["type"] == "http" and self._should_handle(scope["path"]): + # Serve static content + # (the one thing super() doesn't do is __call__, apparently) + return await super().__call__(scope, receive, send) + # Hand off to the main app + return await self.application(scope, receive, send) + + async def get_response_async(self, request): + response = await super().get_response_async(request) + response._resource_closers.append(request.close) + # FileResponse is not async compatible. + if response.streaming and not response.is_async: + _iterator = response.streaming_content + + async def awrapper(): + for part in await sync_to_async(list)(_iterator): + yield part + + response.streaming_content = awrapper() + return response diff --git a/testbed/django__django/django/contrib/staticfiles/management/__init__.py b/testbed/django__django/django/contrib/staticfiles/management/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/contrib/staticfiles/management/commands/__init__.py b/testbed/django__django/django/contrib/staticfiles/management/commands/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/contrib/staticfiles/management/commands/collectstatic.py b/testbed/django__django/django/contrib/staticfiles/management/commands/collectstatic.py new file mode 100644 index 0000000000000000000000000000000000000000..c346038df87f5b874a69bba1d434919a9f93f2c0 --- /dev/null +++ b/testbed/django__django/django/contrib/staticfiles/management/commands/collectstatic.py @@ -0,0 +1,379 @@ +import os + +from django.apps import apps +from 
django.contrib.staticfiles.finders import get_finders +from django.contrib.staticfiles.storage import staticfiles_storage +from django.core.checks import Tags +from django.core.files.storage import FileSystemStorage +from django.core.management.base import BaseCommand, CommandError +from django.core.management.color import no_style +from django.utils.functional import cached_property + + +class Command(BaseCommand): + """ + Copies or symlinks static files from different locations to the + settings.STATIC_ROOT. + """ + + help = "Collect static files in a single location." + requires_system_checks = [Tags.staticfiles] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.copied_files = [] + self.symlinked_files = [] + self.unmodified_files = [] + self.post_processed_files = [] + self.storage = staticfiles_storage + self.style = no_style() + + @cached_property + def local(self): + try: + self.storage.path("") + except NotImplementedError: + return False + return True + + def add_arguments(self, parser): + parser.add_argument( + "--noinput", + "--no-input", + action="store_false", + dest="interactive", + help="Do NOT prompt the user for input of any kind.", + ) + parser.add_argument( + "--no-post-process", + action="store_false", + dest="post_process", + help="Do NOT post process collected files.", + ) + parser.add_argument( + "-i", + "--ignore", + action="append", + default=[], + dest="ignore_patterns", + metavar="PATTERN", + help="Ignore files or directories matching this glob-style " + "pattern. 
def collect(self):
    """
    Perform the bulk of the work of collectstatic.

    Split off from handle() to facilitate testing.
    """
    if self.symlink and not self.local:
        raise CommandError("Can't symlink to a remote destination.")

    if self.clear:
        # Wipe the destination ("" = storage root) before collecting.
        self.clear_dir("")

    if self.symlink:
        handler = self.link_file
    else:
        handler = self.copy_file

    # Maps destination path -> (source storage, source path); also serves
    # to deduplicate: only the first file found for a destination wins.
    found_files = {}
    for finder in get_finders():
        for path, storage in finder.list(self.ignore_patterns):
            # Prefix the relative path if the source storage contains it
            if getattr(storage, "prefix", None):
                prefixed_path = os.path.join(storage.prefix, path)
            else:
                prefixed_path = path

            if prefixed_path not in found_files:
                found_files[prefixed_path] = (storage, path)
                handler(path, prefixed_path, storage)
            else:
                self.log(
                    "Found another file with the destination path '%s'. It "
                    "will be ignored since only the first encountered file "
                    "is collected. If this is not what you want, make sure "
                    "every static file has a unique path." % prefixed_path,
                    level=1,
                )

    # Storage backends may define a post_process() method.
    if self.post_process and hasattr(self.storage, "post_process"):
        processor = self.storage.post_process(found_files, dry_run=self.dry_run)
        # post_process() yields (original, processed, result) triples; the
        # result may be an Exception to re-raise.
        for original_path, processed_path, processed in processor:
            if isinstance(processed, Exception):
                self.stderr.write("Post-processing '%s' failed!" % original_path)
                # Add a blank line before the traceback, otherwise it's
                # too easy to miss the relevant part of the error message.
                self.stderr.write()
                raise processed
            if processed:
                self.log(
                    "Post-processed '%s' as '%s'" % (original_path, processed_path),
                    level=2,
                )
                self.post_processed_files.append(original_path)
            else:
                self.log("Skipped post-processing '%s'" % original_path)

    # Summary consumed by handle() for the final report.
    return {
        "modified": self.copied_files + self.symlinked_files,
        "unmodified": self.unmodified_files,
        "post_processed": self.post_processed_files,
    }
def delete_file(self, path, prefixed_path, source_storage):
    """
    Check if the target file should be deleted if it already exists.

    Return True when the caller should proceed to copy/link the file (any
    stale destination copy has been deleted), or False when the existing
    destination file is up to date and the file can be skipped.
    """
    if self.storage.exists(prefixed_path):
        try:
            # When was the target file modified last time?
            target_last_modified = self.storage.get_modified_time(prefixed_path)
        except (OSError, NotImplementedError, AttributeError):
            # The storage doesn't support get_modified_time() or failed
            pass
        else:
            try:
                # When was the source file modified last time?
                source_last_modified = source_storage.get_modified_time(path)
            except (OSError, NotImplementedError, AttributeError):
                pass
            else:
                # The full path of the target file
                if self.local:
                    full_path = self.storage.path(prefixed_path)
                    # If it's --link mode and the path isn't a link (i.e.
                    # the previous collectstatic wasn't with --link) or if
                    # it's non-link mode and the path is a link (i.e. the
                    # previous collectstatic was with --link), the old
                    # links/files must be deleted so it's not safe to skip
                    # unmodified files.
                    can_skip_unmodified_files = not (
                        self.symlink ^ os.path.islink(full_path)
                    )
                else:
                    # In remote storages, skipping is only based on the
                    # modified times since symlinks aren't relevant.
                    can_skip_unmodified_files = True
                # Avoid sub-second precision (see #14665, #19540)
                file_is_unmodified = target_last_modified.replace(
                    microsecond=0
                ) >= source_last_modified.replace(microsecond=0)
                if file_is_unmodified and can_skip_unmodified_files:
                    if prefixed_path not in self.unmodified_files:
                        self.unmodified_files.append(prefixed_path)
                    self.log("Skipping '%s' (not modified)" % path)
                    return False
        # Then delete the existing file if really needed
        if self.dry_run:
            self.log("Pretending to delete '%s'" % path)
        else:
            self.log("Deleting '%s'" % path)
            self.storage.delete(prefixed_path)
    return True
full_path = self.storage.path(prefixed_path) + os.makedirs(os.path.dirname(full_path), exist_ok=True) + try: + if os.path.lexists(full_path): + os.unlink(full_path) + os.symlink(source_path, full_path) + except NotImplementedError: + import platform + + raise CommandError( + "Symlinking is not supported in this " + "platform (%s)." % platform.platform() + ) + except OSError as e: + raise CommandError(e) + if prefixed_path not in self.symlinked_files: + self.symlinked_files.append(prefixed_path) + + def copy_file(self, path, prefixed_path, source_storage): + """ + Attempt to copy ``path`` with storage + """ + # Skip this file if it was already copied earlier + if prefixed_path in self.copied_files: + return self.log("Skipping '%s' (already copied earlier)" % path) + # Delete the target file if needed or break + if not self.delete_file(path, prefixed_path, source_storage): + return + # The full path of the source file + source_path = source_storage.path(path) + # Finally start copying + if self.dry_run: + self.log("Pretending to copy '%s'" % source_path, level=1) + else: + self.log("Copying '%s'" % source_path, level=2) + with source_storage.open(path) as source_file: + self.storage.save(prefixed_path, source_file) + self.copied_files.append(prefixed_path) diff --git a/testbed/django__django/django/contrib/staticfiles/management/commands/findstatic.py b/testbed/django__django/django/contrib/staticfiles/management/commands/findstatic.py new file mode 100644 index 0000000000000000000000000000000000000000..97413a64af869dccb76f461c4c0502d7c07cac2c --- /dev/null +++ b/testbed/django__django/django/contrib/staticfiles/management/commands/findstatic.py @@ -0,0 +1,48 @@ +import os + +from django.contrib.staticfiles import finders +from django.core.management.base import LabelCommand + + +class Command(LabelCommand): + help = "Finds the absolute paths for the given static file(s)." 
+ label = "staticfile" + + def add_arguments(self, parser): + super().add_arguments(parser) + parser.add_argument( + "--first", + action="store_false", + dest="all", + help="Only return the first match for each static file.", + ) + + def handle_label(self, path, **options): + verbosity = options["verbosity"] + result = finders.find(path, all=options["all"]) + if verbosity >= 2: + searched_locations = ( + "\nLooking in the following locations:\n %s" + % "\n ".join([str(loc) for loc in finders.searched_locations]) + ) + else: + searched_locations = "" + if result: + if not isinstance(result, (list, tuple)): + result = [result] + result = (os.path.realpath(path) for path in result) + if verbosity >= 1: + file_list = "\n ".join(result) + return "Found '%s' here:\n %s%s" % ( + path, + file_list, + searched_locations, + ) + else: + return "\n".join(result) + else: + message = ["No matching file found for '%s'." % path] + if verbosity >= 2: + message.append(searched_locations) + if verbosity >= 1: + self.stderr.write("\n".join(message)) diff --git a/testbed/django__django/django/contrib/staticfiles/management/commands/runserver.py b/testbed/django__django/django/contrib/staticfiles/management/commands/runserver.py new file mode 100644 index 0000000000000000000000000000000000000000..fd9ddb16a42419da355d4a068d2f4abec01630aa --- /dev/null +++ b/testbed/django__django/django/contrib/staticfiles/management/commands/runserver.py @@ -0,0 +1,36 @@ +from django.conf import settings +from django.contrib.staticfiles.handlers import StaticFilesHandler +from django.core.management.commands.runserver import Command as RunserverCommand + + +class Command(RunserverCommand): + help = ( + "Starts a lightweight web server for development and also serves static files." 
+ ) + + def add_arguments(self, parser): + super().add_arguments(parser) + parser.add_argument( + "--nostatic", + action="store_false", + dest="use_static_handler", + help="Tells Django to NOT automatically serve static files at STATIC_URL.", + ) + parser.add_argument( + "--insecure", + action="store_true", + dest="insecure_serving", + help="Allows serving static files even if DEBUG is False.", + ) + + def get_handler(self, *args, **options): + """ + Return the static files serving handler wrapping the default handler, + if static files should be served. Otherwise return the default handler. + """ + handler = super().get_handler(*args, **options) + use_static_handler = options["use_static_handler"] + insecure_serving = options["insecure_serving"] + if use_static_handler and (settings.DEBUG or insecure_serving): + return StaticFilesHandler(handler) + return handler diff --git a/testbed/django__django/django/contrib/staticfiles/storage.py b/testbed/django__django/django/contrib/staticfiles/storage.py new file mode 100644 index 0000000000000000000000000000000000000000..186f8d7535e9bace6e3cfa0e1caf4d8a93fe3dd5 --- /dev/null +++ b/testbed/django__django/django/contrib/staticfiles/storage.py @@ -0,0 +1,543 @@ +import json +import os +import posixpath +import re +from hashlib import md5 +from urllib.parse import unquote, urldefrag, urlsplit, urlunsplit + +from django.conf import STATICFILES_STORAGE_ALIAS, settings +from django.contrib.staticfiles.utils import check_settings, matches_patterns +from django.core.exceptions import ImproperlyConfigured +from django.core.files.base import ContentFile +from django.core.files.storage import FileSystemStorage, storages +from django.utils.functional import LazyObject + + +class StaticFilesStorage(FileSystemStorage): + """ + Standard file system storage for static files. + + The defaults for ``location`` and ``base_url`` are + ``STATIC_ROOT`` and ``STATIC_URL``. 
+ """ + + def __init__(self, location=None, base_url=None, *args, **kwargs): + if location is None: + location = settings.STATIC_ROOT + if base_url is None: + base_url = settings.STATIC_URL + check_settings(base_url) + super().__init__(location, base_url, *args, **kwargs) + # FileSystemStorage fallbacks to MEDIA_ROOT when location + # is empty, so we restore the empty value. + if not location: + self.base_location = None + self.location = None + + def path(self, name): + if not self.location: + raise ImproperlyConfigured( + "You're using the staticfiles app " + "without having set the STATIC_ROOT " + "setting to a filesystem path." + ) + return super().path(name) + + +class HashedFilesMixin: + default_template = """url("%(url)s")""" + max_post_process_passes = 5 + support_js_module_import_aggregation = False + _js_module_import_aggregation_patterns = ( + "*.js", + ( + ( + ( + r"""(?Pimport(?s:(?P[\s\{].*?))""" + r"""\s*from\s*['"](?P[\.\/].*?)["']\s*;)""" + ), + """import%(import)s from "%(url)s";""", + ), + ( + ( + r"""(?Pexport(?s:(?P[\s\{].*?))""" + r"""\s*from\s*["'](?P[\.\/].*?)["']\s*;)""" + ), + """export%(exports)s from "%(url)s";""", + ), + ( + r"""(?Pimport\s*['"](?P[\.\/].*?)["']\s*;)""", + """import"%(url)s";""", + ), + ( + r"""(?Pimport\(["'](?P.*?)["']\))""", + """import("%(url)s")""", + ), + ), + ) + patterns = ( + ( + "*.css", + ( + r"""(?Purl\(['"]{0,1}\s*(?P.*?)["']{0,1}\))""", + ( + r"""(?P@import\s*["']\s*(?P.*?)["'])""", + """@import url("%(url)s")""", + ), + ( + ( + r"(?m)^(?P/\*#[ \t]" + r"(?-i:sourceMappingURL)=(?P.*)[ \t]*\*/)$" + ), + "/*# sourceMappingURL=%(url)s */", + ), + ), + ), + ( + "*.js", + ( + ( + r"(?m)^(?P//# (?-i:sourceMappingURL)=(?P.*))$", + "//# sourceMappingURL=%(url)s", + ), + ), + ), + ) + keep_intermediate_files = True + + def __init__(self, *args, **kwargs): + if self.support_js_module_import_aggregation: + self.patterns += (self._js_module_import_aggregation_patterns,) + super().__init__(*args, **kwargs) + 
self._patterns = {} + self.hashed_files = {} + for extension, patterns in self.patterns: + for pattern in patterns: + if isinstance(pattern, (tuple, list)): + pattern, template = pattern + else: + template = self.default_template + compiled = re.compile(pattern, re.IGNORECASE) + self._patterns.setdefault(extension, []).append((compiled, template)) + + def file_hash(self, name, content=None): + """ + Return a hash of the file with the given name and optional content. + """ + if content is None: + return None + hasher = md5(usedforsecurity=False) + for chunk in content.chunks(): + hasher.update(chunk) + return hasher.hexdigest()[:12] + + def hashed_name(self, name, content=None, filename=None): + # `filename` is the name of file to hash if `content` isn't given. + # `name` is the base name to construct the new hashed filename from. + parsed_name = urlsplit(unquote(name)) + clean_name = parsed_name.path.strip() + filename = (filename and urlsplit(unquote(filename)).path.strip()) or clean_name + opened = content is None + if opened: + if not self.exists(filename): + raise ValueError( + "The file '%s' could not be found with %r." % (filename, self) + ) + try: + content = self.open(filename) + except OSError: + # Handle directory paths and fragments + return name + try: + file_hash = self.file_hash(clean_name, content) + finally: + if opened: + content.close() + path, filename = os.path.split(clean_name) + root, ext = os.path.splitext(filename) + file_hash = (".%s" % file_hash) if file_hash else "" + hashed_name = os.path.join(path, "%s%s%s" % (root, file_hash, ext)) + unparsed_name = list(parsed_name) + unparsed_name[2] = hashed_name + # Special casing for a @font-face hack, like url(myfont.eot?#iefix") + # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax + if "?#" in name and not unparsed_name[3]: + unparsed_name[2] += "?" 
+ return urlunsplit(unparsed_name) + + def _url(self, hashed_name_func, name, force=False, hashed_files=None): + """ + Return the non-hashed URL in DEBUG mode. + """ + if settings.DEBUG and not force: + hashed_name, fragment = name, "" + else: + clean_name, fragment = urldefrag(name) + if urlsplit(clean_name).path.endswith("/"): # don't hash paths + hashed_name = name + else: + args = (clean_name,) + if hashed_files is not None: + args += (hashed_files,) + hashed_name = hashed_name_func(*args) + + final_url = super().url(hashed_name) + + # Special casing for a @font-face hack, like url(myfont.eot?#iefix") + # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax + query_fragment = "?#" in name # [sic!] + if fragment or query_fragment: + urlparts = list(urlsplit(final_url)) + if fragment and not urlparts[4]: + urlparts[4] = fragment + if query_fragment and not urlparts[3]: + urlparts[2] += "?" + final_url = urlunsplit(urlparts) + + return unquote(final_url) + + def url(self, name, force=False): + """ + Return the non-hashed URL in DEBUG mode. + """ + return self._url(self.stored_name, name, force) + + def url_converter(self, name, hashed_files, template=None): + """ + Return the custom URL converter for the given file name. + """ + if template is None: + template = self.default_template + + def converter(matchobj): + """ + Convert the matched URL to a normalized and hashed URL. + + This requires figuring out which files the matched URL resolves + to and calling the url() method of the storage. + """ + matches = matchobj.groupdict() + matched = matches["matched"] + url = matches["url"] + + # Ignore absolute/protocol-relative and data-uri URLs. + if re.match(r"^[a-z]+:", url): + return matched + + # Ignore absolute URLs that don't point to a static file (dynamic + # CSS / JS?). Note that STATIC_URL cannot be empty. 
+ if url.startswith("/") and not url.startswith(settings.STATIC_URL): + return matched + + # Strip off the fragment so a path-like fragment won't interfere. + url_path, fragment = urldefrag(url) + + # Ignore URLs without a path + if not url_path: + return matched + + if url_path.startswith("/"): + # Otherwise the condition above would have returned prematurely. + assert url_path.startswith(settings.STATIC_URL) + target_name = url_path.removeprefix(settings.STATIC_URL) + else: + # We're using the posixpath module to mix paths and URLs conveniently. + source_name = name if os.sep == "/" else name.replace(os.sep, "/") + target_name = posixpath.join(posixpath.dirname(source_name), url_path) + + # Determine the hashed name of the target file with the storage backend. + hashed_url = self._url( + self._stored_name, + unquote(target_name), + force=True, + hashed_files=hashed_files, + ) + + transformed_url = "/".join( + url_path.split("/")[:-1] + hashed_url.split("/")[-1:] + ) + + # Restore the fragment that was stripped off earlier. + if fragment: + transformed_url += ("?#" if "?#" in url else "#") + fragment + + # Return the hashed version to the file + matches["url"] = unquote(transformed_url) + return template % matches + + return converter + + def post_process(self, paths, dry_run=False, **options): + """ + Post process the given dictionary of files (called from collectstatic). + + Processing is actually two separate operations: + + 1. renaming files to include a hash of their content for cache-busting, + and copying those files to the target storage. + 2. adjusting files which contain references to other files so they + refer to the cache-busting filenames. + + If either of these are performed on a file, then that file is considered + post-processed. 
+ """ + # don't even dare to process the files if we're in dry run mode + if dry_run: + return + + # where to store the new paths + hashed_files = {} + + # build a list of adjustable files + adjustable_paths = [ + path for path in paths if matches_patterns(path, self._patterns) + ] + + # Adjustable files to yield at end, keyed by the original path. + processed_adjustable_paths = {} + + # Do a single pass first. Post-process all files once, yielding not + # adjustable files and exceptions, and collecting adjustable files. + for name, hashed_name, processed, _ in self._post_process( + paths, adjustable_paths, hashed_files + ): + if name not in adjustable_paths or isinstance(processed, Exception): + yield name, hashed_name, processed + else: + processed_adjustable_paths[name] = (name, hashed_name, processed) + + paths = {path: paths[path] for path in adjustable_paths} + substitutions = False + + for i in range(self.max_post_process_passes): + substitutions = False + for name, hashed_name, processed, subst in self._post_process( + paths, adjustable_paths, hashed_files + ): + # Overwrite since hashed_name may be newer. + processed_adjustable_paths[name] = (name, hashed_name, processed) + substitutions = substitutions or subst + + if not substitutions: + break + + if substitutions: + yield "All", None, RuntimeError("Max post-process passes exceeded.") + + # Store the processed paths + self.hashed_files.update(hashed_files) + + # Yield adjustable files with final, hashed name. 
+ yield from processed_adjustable_paths.values() + + def _post_process(self, paths, adjustable_paths, hashed_files): + # Sort the files by directory level + def path_level(name): + return len(name.split(os.sep)) + + for name in sorted(paths, key=path_level, reverse=True): + substitutions = True + # use the original, local file, not the copied-but-unprocessed + # file, which might be somewhere far away, like S3 + storage, path = paths[name] + with storage.open(path) as original_file: + cleaned_name = self.clean_name(name) + hash_key = self.hash_key(cleaned_name) + + # generate the hash with the original content, even for + # adjustable files. + if hash_key not in hashed_files: + hashed_name = self.hashed_name(name, original_file) + else: + hashed_name = hashed_files[hash_key] + + # then get the original's file content.. + if hasattr(original_file, "seek"): + original_file.seek(0) + + hashed_file_exists = self.exists(hashed_name) + processed = False + + # ..to apply each replacement pattern to the content + if name in adjustable_paths: + old_hashed_name = hashed_name + try: + content = original_file.read().decode("utf-8") + except UnicodeDecodeError as exc: + yield name, None, exc, False + for extension, patterns in self._patterns.items(): + if matches_patterns(path, (extension,)): + for pattern, template in patterns: + converter = self.url_converter( + name, hashed_files, template + ) + try: + content = pattern.sub(converter, content) + except ValueError as exc: + yield name, None, exc, False + if hashed_file_exists: + self.delete(hashed_name) + # then save the processed result + content_file = ContentFile(content.encode()) + if self.keep_intermediate_files: + # Save intermediate file for reference + self._save(hashed_name, content_file) + hashed_name = self.hashed_name(name, content_file) + + if self.exists(hashed_name): + self.delete(hashed_name) + + saved_name = self._save(hashed_name, content_file) + hashed_name = self.clean_name(saved_name) + # If the file hash 
stayed the same, this file didn't change + if old_hashed_name == hashed_name: + substitutions = False + processed = True + + if not processed: + # or handle the case in which neither processing nor + # a change to the original file happened + if not hashed_file_exists: + processed = True + saved_name = self._save(hashed_name, original_file) + hashed_name = self.clean_name(saved_name) + + # and then set the cache accordingly + hashed_files[hash_key] = hashed_name + + yield name, hashed_name, processed, substitutions + + def clean_name(self, name): + return name.replace("\\", "/") + + def hash_key(self, name): + return name + + def _stored_name(self, name, hashed_files): + # Normalize the path to avoid multiple names for the same file like + # ../foo/bar.css and ../foo/../foo/bar.css which normalize to the same + # path. + name = posixpath.normpath(name) + cleaned_name = self.clean_name(name) + hash_key = self.hash_key(cleaned_name) + cache_name = hashed_files.get(hash_key) + if cache_name is None: + cache_name = self.clean_name(self.hashed_name(name)) + return cache_name + + def stored_name(self, name): + cleaned_name = self.clean_name(name) + hash_key = self.hash_key(cleaned_name) + cache_name = self.hashed_files.get(hash_key) + if cache_name: + return cache_name + # No cached name found, recalculate it from the files. + intermediate_name = name + for i in range(self.max_post_process_passes + 1): + cache_name = self.clean_name( + self.hashed_name(name, content=None, filename=intermediate_name) + ) + if intermediate_name == cache_name: + # Store the hashed name if there was a miss. + self.hashed_files[hash_key] = cache_name + return cache_name + else: + # Move on to the next intermediate file. + intermediate_name = cache_name + # If the cache name can't be determined after the max number of passes, + # the intermediate files on disk may be corrupt; avoid an infinite loop. + raise ValueError("The name '%s' could not be hashed with %r." 
% (name, self)) + + +class ManifestFilesMixin(HashedFilesMixin): + manifest_version = "1.1" # the manifest format standard + manifest_name = "staticfiles.json" + manifest_strict = True + keep_intermediate_files = False + + def __init__(self, *args, manifest_storage=None, **kwargs): + super().__init__(*args, **kwargs) + if manifest_storage is None: + manifest_storage = self + self.manifest_storage = manifest_storage + self.hashed_files, self.manifest_hash = self.load_manifest() + + def read_manifest(self): + try: + with self.manifest_storage.open(self.manifest_name) as manifest: + return manifest.read().decode() + except FileNotFoundError: + return None + + def load_manifest(self): + content = self.read_manifest() + if content is None: + return {}, "" + try: + stored = json.loads(content) + except json.JSONDecodeError: + pass + else: + version = stored.get("version") + if version in ("1.0", "1.1"): + return stored.get("paths", {}), stored.get("hash", "") + raise ValueError( + "Couldn't load manifest '%s' (version %s)" + % (self.manifest_name, self.manifest_version) + ) + + def post_process(self, *args, **kwargs): + self.hashed_files = {} + yield from super().post_process(*args, **kwargs) + if not kwargs.get("dry_run"): + self.save_manifest() + + def save_manifest(self): + self.manifest_hash = self.file_hash( + None, ContentFile(json.dumps(sorted(self.hashed_files.items())).encode()) + ) + payload = { + "paths": self.hashed_files, + "version": self.manifest_version, + "hash": self.manifest_hash, + } + if self.manifest_storage.exists(self.manifest_name): + self.manifest_storage.delete(self.manifest_name) + contents = json.dumps(payload).encode() + self.manifest_storage._save(self.manifest_name, ContentFile(contents)) + + def stored_name(self, name): + parsed_name = urlsplit(unquote(name)) + clean_name = parsed_name.path.strip() + hash_key = self.hash_key(clean_name) + cache_name = self.hashed_files.get(hash_key) + if cache_name is None: + if self.manifest_strict: + 
raise ValueError( + "Missing staticfiles manifest entry for '%s'" % clean_name + ) + cache_name = self.clean_name(self.hashed_name(name)) + unparsed_name = list(parsed_name) + unparsed_name[2] = cache_name + # Special casing for a @font-face hack, like url(myfont.eot?#iefix") + # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax + if "?#" in name and not unparsed_name[3]: + unparsed_name[2] += "?" + return urlunsplit(unparsed_name) + + +class ManifestStaticFilesStorage(ManifestFilesMixin, StaticFilesStorage): + """ + A static file system storage backend which also saves + hashed copies of the files it saves. + """ + + pass + + +class ConfiguredStorage(LazyObject): + def _setup(self): + self._wrapped = storages[STATICFILES_STORAGE_ALIAS] + + +staticfiles_storage = ConfiguredStorage() diff --git a/testbed/django__django/django/contrib/staticfiles/testing.py b/testbed/django__django/django/contrib/staticfiles/testing.py new file mode 100644 index 0000000000000000000000000000000000000000..546a24ae177c397f92d90cdf63da550ad1729ab4 --- /dev/null +++ b/testbed/django__django/django/contrib/staticfiles/testing.py @@ -0,0 +1,13 @@ +from django.contrib.staticfiles.handlers import StaticFilesHandler +from django.test import LiveServerTestCase + + +class StaticLiveServerTestCase(LiveServerTestCase): + """ + Extend django.test.LiveServerTestCase to transparently overlay at test + execution-time the assets provided by the staticfiles app finders. This + means you don't need to run collectstatic before or as a part of your tests + setup. 
+ """ + + static_handler = StaticFilesHandler diff --git a/testbed/django__django/django/contrib/staticfiles/urls.py b/testbed/django__django/django/contrib/staticfiles/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..6278f35b3573921bb4df0614bfa50e6787dc461e --- /dev/null +++ b/testbed/django__django/django/contrib/staticfiles/urls.py @@ -0,0 +1,19 @@ +from django.conf import settings +from django.conf.urls.static import static +from django.contrib.staticfiles.views import serve + +urlpatterns = [] + + +def staticfiles_urlpatterns(prefix=None): + """ + Helper function to return a URL pattern for serving static files. + """ + if prefix is None: + prefix = settings.STATIC_URL + return static(prefix, view=serve) + + +# Only append if urlpatterns are empty +if settings.DEBUG and not urlpatterns: + urlpatterns += staticfiles_urlpatterns() diff --git a/testbed/django__django/django/contrib/staticfiles/utils.py b/testbed/django__django/django/contrib/staticfiles/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..efd67ac8e87c4bb0f2a8208e52d3479f56e5c8c2 --- /dev/null +++ b/testbed/django__django/django/contrib/staticfiles/utils.py @@ -0,0 +1,71 @@ +import fnmatch +import os + +from django.conf import settings +from django.core.exceptions import ImproperlyConfigured + + +def matches_patterns(path, patterns): + """ + Return True or False depending on whether the ``path`` should be + ignored (if it matches any pattern in ``ignore_patterns``). + """ + return any(fnmatch.fnmatchcase(path, pattern) for pattern in patterns) + + +def get_files(storage, ignore_patterns=None, location=""): + """ + Recursively walk the storage directories yielding the paths + of all files that should be copied. + """ + if ignore_patterns is None: + ignore_patterns = [] + directories, files = storage.listdir(location) + for fn in files: + # Match only the basename. 
+ if matches_patterns(fn, ignore_patterns): + continue + if location: + fn = os.path.join(location, fn) + # Match the full file path. + if matches_patterns(fn, ignore_patterns): + continue + yield fn + for dir in directories: + if matches_patterns(dir, ignore_patterns): + continue + if location: + dir = os.path.join(location, dir) + yield from get_files(storage, ignore_patterns, dir) + + +def check_settings(base_url=None): + """ + Check if the staticfiles settings have sane values. + """ + if base_url is None: + base_url = settings.STATIC_URL + if not base_url: + raise ImproperlyConfigured( + "You're using the staticfiles app " + "without having set the required STATIC_URL setting." + ) + if settings.MEDIA_URL == base_url: + raise ImproperlyConfigured( + "The MEDIA_URL and STATIC_URL settings must have different values" + ) + if ( + settings.DEBUG + and settings.MEDIA_URL + and settings.STATIC_URL + and settings.MEDIA_URL.startswith(settings.STATIC_URL) + ): + raise ImproperlyConfigured( + "runserver can't serve media if MEDIA_URL is within STATIC_URL." + ) + if (settings.MEDIA_ROOT and settings.STATIC_ROOT) and ( + settings.MEDIA_ROOT == settings.STATIC_ROOT + ): + raise ImproperlyConfigured( + "The MEDIA_ROOT and STATIC_ROOT settings must have different values" + ) diff --git a/testbed/django__django/django/contrib/staticfiles/views.py b/testbed/django__django/django/contrib/staticfiles/views.py new file mode 100644 index 0000000000000000000000000000000000000000..83d04d4cec11452eb95dc1a2a396c7fa8f7bfc0d --- /dev/null +++ b/testbed/django__django/django/contrib/staticfiles/views.py @@ -0,0 +1,39 @@ +""" +Views and functions for serving static files. These are only to be used during +development, and SHOULD NOT be used in a production setting. 
+ +""" +import os +import posixpath + +from django.conf import settings +from django.contrib.staticfiles import finders +from django.http import Http404 +from django.views import static + + +def serve(request, path, insecure=False, **kwargs): + """ + Serve static files below a given point in the directory structure or + from locations inferred from the staticfiles finders. + + To use, put a URL pattern such as:: + + from django.contrib.staticfiles import views + + path('', views.serve) + + in your URLconf. + + It uses the django.views.static.serve() view to serve the found files. + """ + if not settings.DEBUG and not insecure: + raise Http404 + normalized_path = posixpath.normpath(path).lstrip("/") + absolute_path = finders.find(normalized_path) + if not absolute_path: + if path.endswith("/") or path == "": + raise Http404("Directory indexes are not allowed here.") + raise Http404("'%s' could not be found" % path) + document_root, path = os.path.split(absolute_path) + return static.serve(request, path, document_root=document_root, **kwargs) diff --git a/testbed/django__django/django/contrib/syndication/__init__.py b/testbed/django__django/django/contrib/syndication/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/contrib/syndication/apps.py b/testbed/django__django/django/contrib/syndication/apps.py new file mode 100644 index 0000000000000000000000000000000000000000..bb0f86aa21f86cd8fefb4eea7f5a5dae603b5519 --- /dev/null +++ b/testbed/django__django/django/contrib/syndication/apps.py @@ -0,0 +1,7 @@ +from django.apps import AppConfig +from django.utils.translation import gettext_lazy as _ + + +class SyndicationConfig(AppConfig): + name = "django.contrib.syndication" + verbose_name = _("Syndication") diff --git a/testbed/django__django/django/contrib/syndication/views.py b/testbed/django__django/django/contrib/syndication/views.py new file mode 100644 
index 0000000000000000000000000000000000000000..2378a14874d2357c81e0a98e33fb84885cbf3442 --- /dev/null +++ b/testbed/django__django/django/contrib/syndication/views.py @@ -0,0 +1,234 @@ +from inspect import getattr_static, unwrap + +from django.contrib.sites.shortcuts import get_current_site +from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist +from django.http import Http404, HttpResponse +from django.template import TemplateDoesNotExist, loader +from django.utils import feedgenerator +from django.utils.encoding import iri_to_uri +from django.utils.html import escape +from django.utils.http import http_date +from django.utils.timezone import get_default_timezone, is_naive, make_aware +from django.utils.translation import get_language + + +def add_domain(domain, url, secure=False): + protocol = "https" if secure else "http" + if url.startswith("//"): + # Support network-path reference (see #16753) - RSS requires a protocol + url = "%s:%s" % (protocol, url) + elif not url.startswith(("http://", "https://", "mailto:")): + url = iri_to_uri("%s://%s%s" % (protocol, domain, url)) + return url + + +class FeedDoesNotExist(ObjectDoesNotExist): + pass + + +class Feed: + feed_type = feedgenerator.DefaultFeed + title_template = None + description_template = None + language = None + + def __call__(self, request, *args, **kwargs): + try: + obj = self.get_object(request, *args, **kwargs) + except ObjectDoesNotExist: + raise Http404("Feed object does not exist.") + feedgen = self.get_feed(obj, request) + response = HttpResponse(content_type=feedgen.content_type) + if hasattr(self, "item_pubdate") or hasattr(self, "item_updateddate"): + # if item_pubdate or item_updateddate is defined for the feed, set + # header so as ConditionalGetMiddleware is able to send 304 NOT MODIFIED + response.headers["Last-Modified"] = http_date( + feedgen.latest_post_date().timestamp() + ) + feedgen.write(response, "utf-8") + return response + + def item_title(self, item): + # 
Titles should be double escaped by default (see #6533) + return escape(str(item)) + + def item_description(self, item): + return str(item) + + def item_link(self, item): + try: + return item.get_absolute_url() + except AttributeError: + raise ImproperlyConfigured( + "Give your %s class a get_absolute_url() method, or define an " + "item_link() method in your Feed class." % item.__class__.__name__ + ) + + def item_enclosures(self, item): + enc_url = self._get_dynamic_attr("item_enclosure_url", item) + if enc_url: + enc = feedgenerator.Enclosure( + url=str(enc_url), + length=str(self._get_dynamic_attr("item_enclosure_length", item)), + mime_type=str(self._get_dynamic_attr("item_enclosure_mime_type", item)), + ) + return [enc] + return [] + + def _get_dynamic_attr(self, attname, obj, default=None): + try: + attr = getattr(self, attname) + except AttributeError: + return default + if callable(attr): + # Check co_argcount rather than try/excepting the function and + # catching the TypeError, because something inside the function + # may raise the TypeError. This technique is more accurate. + func = unwrap(attr) + try: + code = func.__code__ + except AttributeError: + func = unwrap(attr.__call__) + code = func.__code__ + # If function doesn't have arguments and it is not a static method, + # it was decorated without using @functools.wraps. + if not code.co_argcount and not isinstance( + getattr_static(self, func.__name__, None), staticmethod + ): + raise ImproperlyConfigured( + f"Feed method {attname!r} decorated by {func.__name__!r} needs to " + f"use @functools.wraps." + ) + if code.co_argcount == 2: # one argument is 'self' + return attr(obj) + else: + return attr() + return attr + + def feed_extra_kwargs(self, obj): + """ + Return an extra keyword arguments dictionary that is used when + initializing the feed generator. 
+ """ + return {} + + def item_extra_kwargs(self, item): + """ + Return an extra keyword arguments dictionary that is used with + the `add_item` call of the feed generator. + """ + return {} + + def get_object(self, request, *args, **kwargs): + return None + + def get_context_data(self, **kwargs): + """ + Return a dictionary to use as extra context if either + ``self.description_template`` or ``self.item_template`` are used. + + Default implementation preserves the old behavior + of using {'obj': item, 'site': current_site} as the context. + """ + return {"obj": kwargs.get("item"), "site": kwargs.get("site")} + + def get_feed(self, obj, request): + """ + Return a feedgenerator.DefaultFeed object, fully populated, for + this feed. Raise FeedDoesNotExist for invalid parameters. + """ + current_site = get_current_site(request) + + link = self._get_dynamic_attr("link", obj) + link = add_domain(current_site.domain, link, request.is_secure()) + + feed = self.feed_type( + title=self._get_dynamic_attr("title", obj), + subtitle=self._get_dynamic_attr("subtitle", obj), + link=link, + description=self._get_dynamic_attr("description", obj), + language=self.language or get_language(), + feed_url=add_domain( + current_site.domain, + self._get_dynamic_attr("feed_url", obj) or request.path, + request.is_secure(), + ), + author_name=self._get_dynamic_attr("author_name", obj), + author_link=self._get_dynamic_attr("author_link", obj), + author_email=self._get_dynamic_attr("author_email", obj), + categories=self._get_dynamic_attr("categories", obj), + feed_copyright=self._get_dynamic_attr("feed_copyright", obj), + feed_guid=self._get_dynamic_attr("feed_guid", obj), + ttl=self._get_dynamic_attr("ttl", obj), + **self.feed_extra_kwargs(obj), + ) + + title_tmp = None + if self.title_template is not None: + try: + title_tmp = loader.get_template(self.title_template) + except TemplateDoesNotExist: + pass + + description_tmp = None + if self.description_template is not None: + try: + 
description_tmp = loader.get_template(self.description_template) + except TemplateDoesNotExist: + pass + + for item in self._get_dynamic_attr("items", obj): + context = self.get_context_data( + item=item, site=current_site, obj=obj, request=request + ) + if title_tmp is not None: + title = title_tmp.render(context, request) + else: + title = self._get_dynamic_attr("item_title", item) + if description_tmp is not None: + description = description_tmp.render(context, request) + else: + description = self._get_dynamic_attr("item_description", item) + link = add_domain( + current_site.domain, + self._get_dynamic_attr("item_link", item), + request.is_secure(), + ) + enclosures = self._get_dynamic_attr("item_enclosures", item) + author_name = self._get_dynamic_attr("item_author_name", item) + if author_name is not None: + author_email = self._get_dynamic_attr("item_author_email", item) + author_link = self._get_dynamic_attr("item_author_link", item) + else: + author_email = author_link = None + + tz = get_default_timezone() + + pubdate = self._get_dynamic_attr("item_pubdate", item) + if pubdate and is_naive(pubdate): + pubdate = make_aware(pubdate, tz) + + updateddate = self._get_dynamic_attr("item_updateddate", item) + if updateddate and is_naive(updateddate): + updateddate = make_aware(updateddate, tz) + + feed.add_item( + title=title, + link=link, + description=description, + unique_id=self._get_dynamic_attr("item_guid", item, link), + unique_id_is_permalink=self._get_dynamic_attr( + "item_guid_is_permalink", item + ), + enclosures=enclosures, + pubdate=pubdate, + updateddate=updateddate, + author_name=author_name, + author_email=author_email, + author_link=author_link, + comments=self._get_dynamic_attr("item_comments", item), + categories=self._get_dynamic_attr("item_categories", item), + item_copyright=self._get_dynamic_attr("item_copyright", item), + **self.item_extra_kwargs(item), + ) + return feed diff --git a/testbed/django__django/django/core/cache/__init__.py 
b/testbed/django__django/django/core/cache/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..eb7fa5b2e999d38971300b42cf780cecb8632f22 --- /dev/null +++ b/testbed/django__django/django/core/cache/__init__.py @@ -0,0 +1,66 @@ +""" +Caching framework. + +This package defines set of cache backends that all conform to a simple API. +In a nutshell, a cache is a set of values -- which can be any object that +may be pickled -- identified by string keys. For the complete API, see +the abstract BaseCache class in django.core.cache.backends.base. + +Client code should use the `cache` variable defined here to access the default +cache backend and look up non-default cache backends in the `caches` dict-like +object. + +See docs/topics/cache.txt for information on the public API. +""" +from django.core import signals +from django.core.cache.backends.base import ( + BaseCache, + CacheKeyWarning, + InvalidCacheBackendError, + InvalidCacheKey, +) +from django.utils.connection import BaseConnectionHandler, ConnectionProxy +from django.utils.module_loading import import_string + +__all__ = [ + "cache", + "caches", + "DEFAULT_CACHE_ALIAS", + "InvalidCacheBackendError", + "CacheKeyWarning", + "BaseCache", + "InvalidCacheKey", +] + +DEFAULT_CACHE_ALIAS = "default" + + +class CacheHandler(BaseConnectionHandler): + settings_name = "CACHES" + exception_class = InvalidCacheBackendError + + def create_connection(self, alias): + params = self.settings[alias].copy() + backend = params.pop("BACKEND") + location = params.pop("LOCATION", "") + try: + backend_cls = import_string(backend) + except ImportError as e: + raise InvalidCacheBackendError( + "Could not find backend '%s': %s" % (backend, e) + ) from e + return backend_cls(location, params) + + +caches = CacheHandler() + +cache = ConnectionProxy(caches, DEFAULT_CACHE_ALIAS) + + +def close_caches(**kwargs): + # Some caches need to do a cleanup at the end of a request cycle. 
If not + # implemented in a particular backend cache.close() is a no-op. + caches.close_all() + + +signals.request_finished.connect(close_caches) diff --git a/testbed/django__django/django/core/cache/backends/__init__.py b/testbed/django__django/django/core/cache/backends/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/core/cache/backends/base.py b/testbed/django__django/django/core/cache/backends/base.py new file mode 100644 index 0000000000000000000000000000000000000000..09b0a5f9c89a5945885dd02d5fa17da4f0631e64 --- /dev/null +++ b/testbed/django__django/django/core/cache/backends/base.py @@ -0,0 +1,405 @@ +"Base Cache class." +import time +import warnings + +from asgiref.sync import sync_to_async + +from django.core.exceptions import ImproperlyConfigured +from django.utils.module_loading import import_string +from django.utils.regex_helper import _lazy_re_compile + + +class InvalidCacheBackendError(ImproperlyConfigured): + pass + + +class CacheKeyWarning(RuntimeWarning): + pass + + +class InvalidCacheKey(ValueError): + pass + + +# Stub class to ensure not passing in a `timeout` argument results in +# the default timeout +DEFAULT_TIMEOUT = object() + +# Memcached does not accept keys longer than this. +MEMCACHE_MAX_KEY_LENGTH = 250 + + +def default_key_func(key, key_prefix, version): + """ + Default function to generate keys. + + Construct the key used by all other methods. By default, prepend + the `key_prefix`. KEY_FUNCTION can be used to specify an alternate + function with custom key making behavior. + """ + return "%s:%s:%s" % (key_prefix, version, key) + + +def get_key_func(key_func): + """ + Function to decide which key function to use. + + Default to ``default_key_func``. 
+ """ + if key_func is not None: + if callable(key_func): + return key_func + else: + return import_string(key_func) + return default_key_func + + +class BaseCache: + _missing_key = object() + + def __init__(self, params): + timeout = params.get("timeout", params.get("TIMEOUT", 300)) + if timeout is not None: + try: + timeout = int(timeout) + except (ValueError, TypeError): + timeout = 300 + self.default_timeout = timeout + + options = params.get("OPTIONS", {}) + max_entries = params.get("max_entries", options.get("MAX_ENTRIES", 300)) + try: + self._max_entries = int(max_entries) + except (ValueError, TypeError): + self._max_entries = 300 + + cull_frequency = params.get("cull_frequency", options.get("CULL_FREQUENCY", 3)) + try: + self._cull_frequency = int(cull_frequency) + except (ValueError, TypeError): + self._cull_frequency = 3 + + self.key_prefix = params.get("KEY_PREFIX", "") + self.version = params.get("VERSION", 1) + self.key_func = get_key_func(params.get("KEY_FUNCTION")) + + def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT): + """ + Return the timeout value usable by this backend based upon the provided + timeout. + """ + if timeout == DEFAULT_TIMEOUT: + timeout = self.default_timeout + elif timeout == 0: + # ticket 21147 - avoid time.time() related precision issues + timeout = -1 + return None if timeout is None else time.time() + timeout + + def make_key(self, key, version=None): + """ + Construct the key used by all other methods. By default, use the + key_func to generate a key (which, by default, prepends the + `key_prefix' and 'version'). A different key function can be provided + at the time of cache construction; alternatively, you can subclass the + cache backend to provide custom key making behavior. + """ + if version is None: + version = self.version + + return self.key_func(key, self.key_prefix, version) + + def validate_key(self, key): + """ + Warn about keys that would not be portable to the memcached + backend. 
This encourages (but does not force) writing backend-portable + cache code. + """ + for warning in memcache_key_warnings(key): + warnings.warn(warning, CacheKeyWarning) + + def make_and_validate_key(self, key, version=None): + """Helper to make and validate keys.""" + key = self.make_key(key, version=version) + self.validate_key(key) + return key + + def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + """ + Set a value in the cache if the key does not already exist. If + timeout is given, use that timeout for the key; otherwise use the + default cache timeout. + + Return True if the value was stored, False otherwise. + """ + raise NotImplementedError( + "subclasses of BaseCache must provide an add() method" + ) + + async def aadd(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + return await sync_to_async(self.add, thread_sensitive=True)( + key, value, timeout, version + ) + + def get(self, key, default=None, version=None): + """ + Fetch a given key from the cache. If the key does not exist, return + default, which itself defaults to None. + """ + raise NotImplementedError("subclasses of BaseCache must provide a get() method") + + async def aget(self, key, default=None, version=None): + return await sync_to_async(self.get, thread_sensitive=True)( + key, default, version + ) + + def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + """ + Set a value in the cache. If timeout is given, use that timeout for the + key; otherwise use the default cache timeout. + """ + raise NotImplementedError("subclasses of BaseCache must provide a set() method") + + async def aset(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + return await sync_to_async(self.set, thread_sensitive=True)( + key, value, timeout, version + ) + + def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None): + """ + Update the key's expiry time using timeout. Return True if successful + or False if the key does not exist. 
+ """ + raise NotImplementedError( + "subclasses of BaseCache must provide a touch() method" + ) + + async def atouch(self, key, timeout=DEFAULT_TIMEOUT, version=None): + return await sync_to_async(self.touch, thread_sensitive=True)( + key, timeout, version + ) + + def delete(self, key, version=None): + """ + Delete a key from the cache and return whether it succeeded, failing + silently. + """ + raise NotImplementedError( + "subclasses of BaseCache must provide a delete() method" + ) + + async def adelete(self, key, version=None): + return await sync_to_async(self.delete, thread_sensitive=True)(key, version) + + def get_many(self, keys, version=None): + """ + Fetch a bunch of keys from the cache. For certain backends (memcached, + pgsql) this can be *much* faster when fetching multiple values. + + Return a dict mapping each key in keys to its value. If the given + key is missing, it will be missing from the response dict. + """ + d = {} + for k in keys: + val = self.get(k, self._missing_key, version=version) + if val is not self._missing_key: + d[k] = val + return d + + async def aget_many(self, keys, version=None): + """See get_many().""" + d = {} + for k in keys: + val = await self.aget(k, self._missing_key, version=version) + if val is not self._missing_key: + d[k] = val + return d + + def get_or_set(self, key, default, timeout=DEFAULT_TIMEOUT, version=None): + """ + Fetch a given key from the cache. If the key does not exist, + add the key and set it to the default value. The default value can + also be any callable. If timeout is given, use that timeout for the + key; otherwise use the default cache timeout. + + Return the value of the key stored or retrieved. 
+ """ + val = self.get(key, self._missing_key, version=version) + if val is self._missing_key: + if callable(default): + default = default() + self.add(key, default, timeout=timeout, version=version) + # Fetch the value again to avoid a race condition if another caller + # added a value between the first get() and the add() above. + return self.get(key, default, version=version) + return val + + async def aget_or_set(self, key, default, timeout=DEFAULT_TIMEOUT, version=None): + """See get_or_set().""" + val = await self.aget(key, self._missing_key, version=version) + if val is self._missing_key: + if callable(default): + default = default() + await self.aadd(key, default, timeout=timeout, version=version) + # Fetch the value again to avoid a race condition if another caller + # added a value between the first aget() and the aadd() above. + return await self.aget(key, default, version=version) + return val + + def has_key(self, key, version=None): + """ + Return True if the key is in the cache and has not expired. + """ + return ( + self.get(key, self._missing_key, version=version) is not self._missing_key + ) + + async def ahas_key(self, key, version=None): + return ( + await self.aget(key, self._missing_key, version=version) + is not self._missing_key + ) + + def incr(self, key, delta=1, version=None): + """ + Add delta to value in the cache. If the key does not exist, raise a + ValueError exception. 
+ """ + value = self.get(key, self._missing_key, version=version) + if value is self._missing_key: + raise ValueError("Key '%s' not found" % key) + new_value = value + delta + self.set(key, new_value, version=version) + return new_value + + async def aincr(self, key, delta=1, version=None): + """See incr().""" + value = await self.aget(key, self._missing_key, version=version) + if value is self._missing_key: + raise ValueError("Key '%s' not found" % key) + new_value = value + delta + await self.aset(key, new_value, version=version) + return new_value + + def decr(self, key, delta=1, version=None): + """ + Subtract delta from value in the cache. If the key does not exist, raise + a ValueError exception. + """ + return self.incr(key, -delta, version=version) + + async def adecr(self, key, delta=1, version=None): + return await self.aincr(key, -delta, version=version) + + def __contains__(self, key): + """ + Return True if the key is in the cache and has not expired. + """ + # This is a separate method, rather than just a copy of has_key(), + # so that it always has the same functionality as has_key(), even + # if a subclass overrides it. + return self.has_key(key) + + def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None): + """ + Set a bunch of values in the cache at once from a dict of key/value + pairs. For certain backends (memcached), this is much more efficient + than calling set() multiple times. + + If timeout is given, use that timeout for the key; otherwise use the + default cache timeout. + + On backends that support it, return a list of keys that failed + insertion, or an empty list if all keys were inserted successfully. 
+ """ + for key, value in data.items(): + self.set(key, value, timeout=timeout, version=version) + return [] + + async def aset_many(self, data, timeout=DEFAULT_TIMEOUT, version=None): + for key, value in data.items(): + await self.aset(key, value, timeout=timeout, version=version) + return [] + + def delete_many(self, keys, version=None): + """ + Delete a bunch of values in the cache at once. For certain backends + (memcached), this is much more efficient than calling delete() multiple + times. + """ + for key in keys: + self.delete(key, version=version) + + async def adelete_many(self, keys, version=None): + for key in keys: + await self.adelete(key, version=version) + + def clear(self): + """Remove *all* values from the cache at once.""" + raise NotImplementedError( + "subclasses of BaseCache must provide a clear() method" + ) + + async def aclear(self): + return await sync_to_async(self.clear, thread_sensitive=True)() + + def incr_version(self, key, delta=1, version=None): + """ + Add delta to the cache version for the supplied key. Return the new + version. + """ + if version is None: + version = self.version + + value = self.get(key, self._missing_key, version=version) + if value is self._missing_key: + raise ValueError("Key '%s' not found" % key) + + self.set(key, value, version=version + delta) + self.delete(key, version=version) + return version + delta + + async def aincr_version(self, key, delta=1, version=None): + """See incr_version().""" + if version is None: + version = self.version + + value = await self.aget(key, self._missing_key, version=version) + if value is self._missing_key: + raise ValueError("Key '%s' not found" % key) + + await self.aset(key, value, version=version + delta) + await self.adelete(key, version=version) + return version + delta + + def decr_version(self, key, delta=1, version=None): + """ + Subtract delta from the cache version for the supplied key. Return the + new version. 
+ """ + return self.incr_version(key, -delta, version) + + async def adecr_version(self, key, delta=1, version=None): + return await self.aincr_version(key, -delta, version) + + def close(self, **kwargs): + """Close the cache connection""" + pass + + async def aclose(self, **kwargs): + pass + + +memcached_error_chars_re = _lazy_re_compile(r"[\x00-\x20\x7f]") + + +def memcache_key_warnings(key): + if len(key) > MEMCACHE_MAX_KEY_LENGTH: + yield ( + "Cache key will cause errors if used with memcached: %r " + "(longer than %s)" % (key, MEMCACHE_MAX_KEY_LENGTH) + ) + if memcached_error_chars_re.search(key): + yield ( + "Cache key contains characters that will cause errors if used with " + f"memcached: {key!r}" + ) diff --git a/testbed/django__django/django/core/cache/backends/db.py b/testbed/django__django/django/core/cache/backends/db.py new file mode 100644 index 0000000000000000000000000000000000000000..f41105177f18cb35174768535f5426633e721072 --- /dev/null +++ b/testbed/django__django/django/core/cache/backends/db.py @@ -0,0 +1,293 @@ +"Database cache backend." +import base64 +import pickle +from datetime import datetime, timezone + +from django.conf import settings +from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache +from django.db import DatabaseError, connections, models, router, transaction +from django.utils.timezone import now as tz_now + + +class Options: + """A class that will quack like a Django model _meta class. 
+ + This allows cache operations to be controlled by the router + """ + + def __init__(self, table): + self.db_table = table + self.app_label = "django_cache" + self.model_name = "cacheentry" + self.verbose_name = "cache entry" + self.verbose_name_plural = "cache entries" + self.object_name = "CacheEntry" + self.abstract = False + self.managed = True + self.proxy = False + self.swapped = False + + +class BaseDatabaseCache(BaseCache): + def __init__(self, table, params): + super().__init__(params) + self._table = table + + class CacheEntry: + _meta = Options(table) + + self.cache_model_class = CacheEntry + + +class DatabaseCache(BaseDatabaseCache): + # This class uses cursors provided by the database connection. This means + # it reads expiration values as aware or naive datetimes, depending on the + # value of USE_TZ and whether the database supports time zones. The ORM's + # conversion and adaptation infrastructure is then used to avoid comparing + # aware and naive datetimes accidentally. 
+ + pickle_protocol = pickle.HIGHEST_PROTOCOL + + def get(self, key, default=None, version=None): + return self.get_many([key], version).get(key, default) + + def get_many(self, keys, version=None): + if not keys: + return {} + + key_map = { + self.make_and_validate_key(key, version=version): key for key in keys + } + + db = router.db_for_read(self.cache_model_class) + connection = connections[db] + quote_name = connection.ops.quote_name + table = quote_name(self._table) + + with connection.cursor() as cursor: + cursor.execute( + "SELECT %s, %s, %s FROM %s WHERE %s IN (%s)" + % ( + quote_name("cache_key"), + quote_name("value"), + quote_name("expires"), + table, + quote_name("cache_key"), + ", ".join(["%s"] * len(key_map)), + ), + list(key_map), + ) + rows = cursor.fetchall() + + result = {} + expired_keys = [] + expression = models.Expression(output_field=models.DateTimeField()) + converters = connection.ops.get_db_converters( + expression + ) + expression.get_db_converters(connection) + for key, value, expires in rows: + for converter in converters: + expires = converter(expires, expression, connection) + if expires < tz_now(): + expired_keys.append(key) + else: + value = connection.ops.process_clob(value) + value = pickle.loads(base64.b64decode(value.encode())) + result[key_map.get(key)] = value + self._base_delete_many(expired_keys) + return result + + def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + key = self.make_and_validate_key(key, version=version) + self._base_set("set", key, value, timeout) + + def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + key = self.make_and_validate_key(key, version=version) + return self._base_set("add", key, value, timeout) + + def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None): + key = self.make_and_validate_key(key, version=version) + return self._base_set("touch", key, None, timeout) + + def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT): + timeout = 
self.get_backend_timeout(timeout) + db = router.db_for_write(self.cache_model_class) + connection = connections[db] + quote_name = connection.ops.quote_name + table = quote_name(self._table) + + with connection.cursor() as cursor: + cursor.execute("SELECT COUNT(*) FROM %s" % table) + num = cursor.fetchone()[0] + now = tz_now() + now = now.replace(microsecond=0) + if timeout is None: + exp = datetime.max + else: + tz = timezone.utc if settings.USE_TZ else None + exp = datetime.fromtimestamp(timeout, tz=tz) + exp = exp.replace(microsecond=0) + if num > self._max_entries: + self._cull(db, cursor, now, num) + pickled = pickle.dumps(value, self.pickle_protocol) + # The DB column is expecting a string, so make sure the value is a + # string, not bytes. Refs #19274. + b64encoded = base64.b64encode(pickled).decode("latin1") + try: + # Note: typecasting for datetimes is needed by some 3rd party + # database backends. All core backends work without typecasting, + # so be careful about changes here - test suite will NOT pick + # regressions. 
+ with transaction.atomic(using=db): + cursor.execute( + "SELECT %s, %s FROM %s WHERE %s = %%s" + % ( + quote_name("cache_key"), + quote_name("expires"), + table, + quote_name("cache_key"), + ), + [key], + ) + result = cursor.fetchone() + + if result: + current_expires = result[1] + expression = models.Expression( + output_field=models.DateTimeField() + ) + for converter in connection.ops.get_db_converters( + expression + ) + expression.get_db_converters(connection): + current_expires = converter( + current_expires, expression, connection + ) + + exp = connection.ops.adapt_datetimefield_value(exp) + if result and mode == "touch": + cursor.execute( + "UPDATE %s SET %s = %%s WHERE %s = %%s" + % (table, quote_name("expires"), quote_name("cache_key")), + [exp, key], + ) + elif result and ( + mode == "set" or (mode == "add" and current_expires < now) + ): + cursor.execute( + "UPDATE %s SET %s = %%s, %s = %%s WHERE %s = %%s" + % ( + table, + quote_name("value"), + quote_name("expires"), + quote_name("cache_key"), + ), + [b64encoded, exp, key], + ) + elif mode != "touch": + cursor.execute( + "INSERT INTO %s (%s, %s, %s) VALUES (%%s, %%s, %%s)" + % ( + table, + quote_name("cache_key"), + quote_name("value"), + quote_name("expires"), + ), + [key, b64encoded, exp], + ) + else: + return False # touch failed. 
+ except DatabaseError: + # To be threadsafe, updates/inserts are allowed to fail silently + return False + else: + return True + + def delete(self, key, version=None): + key = self.make_and_validate_key(key, version=version) + return self._base_delete_many([key]) + + def delete_many(self, keys, version=None): + keys = [self.make_and_validate_key(key, version=version) for key in keys] + self._base_delete_many(keys) + + def _base_delete_many(self, keys): + if not keys: + return False + + db = router.db_for_write(self.cache_model_class) + connection = connections[db] + quote_name = connection.ops.quote_name + table = quote_name(self._table) + + with connection.cursor() as cursor: + cursor.execute( + "DELETE FROM %s WHERE %s IN (%s)" + % ( + table, + quote_name("cache_key"), + ", ".join(["%s"] * len(keys)), + ), + keys, + ) + return bool(cursor.rowcount) + + def has_key(self, key, version=None): + key = self.make_and_validate_key(key, version=version) + + db = router.db_for_read(self.cache_model_class) + connection = connections[db] + quote_name = connection.ops.quote_name + + now = tz_now().replace(microsecond=0, tzinfo=None) + + with connection.cursor() as cursor: + cursor.execute( + "SELECT %s FROM %s WHERE %s = %%s and %s > %%s" + % ( + quote_name("cache_key"), + quote_name(self._table), + quote_name("cache_key"), + quote_name("expires"), + ), + [key, connection.ops.adapt_datetimefield_value(now)], + ) + return cursor.fetchone() is not None + + def _cull(self, db, cursor, now, num): + if self._cull_frequency == 0: + self.clear() + else: + connection = connections[db] + table = connection.ops.quote_name(self._table) + cursor.execute( + "DELETE FROM %s WHERE %s < %%s" + % ( + table, + connection.ops.quote_name("expires"), + ), + [connection.ops.adapt_datetimefield_value(now)], + ) + deleted_count = cursor.rowcount + remaining_num = num - deleted_count + if remaining_num > self._max_entries: + cull_num = remaining_num // self._cull_frequency + cursor.execute( + 
connection.ops.cache_key_culling_sql() % table, [cull_num] + ) + last_cache_key = cursor.fetchone() + if last_cache_key: + cursor.execute( + "DELETE FROM %s WHERE %s < %%s" + % ( + table, + connection.ops.quote_name("cache_key"), + ), + [last_cache_key[0]], + ) + + def clear(self): + db = router.db_for_write(self.cache_model_class) + connection = connections[db] + table = connection.ops.quote_name(self._table) + with connection.cursor() as cursor: + cursor.execute("DELETE FROM %s" % table) diff --git a/testbed/django__django/django/core/cache/backends/dummy.py b/testbed/django__django/django/core/cache/backends/dummy.py new file mode 100644 index 0000000000000000000000000000000000000000..7b4d339318422f45b7f5a35f974f1129a6626bd8 --- /dev/null +++ b/testbed/django__django/django/core/cache/backends/dummy.py @@ -0,0 +1,34 @@ +"Dummy cache backend" + +from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache + + +class DummyCache(BaseCache): + def __init__(self, host, *args, **kwargs): + super().__init__(*args, **kwargs) + + def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + self.make_and_validate_key(key, version=version) + return True + + def get(self, key, default=None, version=None): + self.make_and_validate_key(key, version=version) + return default + + def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + self.make_and_validate_key(key, version=version) + + def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None): + self.make_and_validate_key(key, version=version) + return False + + def delete(self, key, version=None): + self.make_and_validate_key(key, version=version) + return False + + def has_key(self, key, version=None): + self.make_and_validate_key(key, version=version) + return False + + def clear(self): + pass diff --git a/testbed/django__django/django/core/cache/backends/filebased.py b/testbed/django__django/django/core/cache/backends/filebased.py new file mode 100644 index 
0000000000000000000000000000000000000000..29d49c0ede9f1597ce849efd2e1ac3d59ffb226c --- /dev/null +++ b/testbed/django__django/django/core/cache/backends/filebased.py @@ -0,0 +1,170 @@ +"File-based cache backend" +import glob +import os +import pickle +import random +import tempfile +import time +import zlib +from hashlib import md5 + +from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache +from django.core.files import locks +from django.core.files.move import file_move_safe + + +class FileBasedCache(BaseCache): + cache_suffix = ".djcache" + pickle_protocol = pickle.HIGHEST_PROTOCOL + + def __init__(self, dir, params): + super().__init__(params) + self._dir = os.path.abspath(dir) + self._createdir() + + def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + if self.has_key(key, version): + return False + self.set(key, value, timeout, version) + return True + + def get(self, key, default=None, version=None): + fname = self._key_to_file(key, version) + try: + with open(fname, "rb") as f: + if not self._is_expired(f): + return pickle.loads(zlib.decompress(f.read())) + except FileNotFoundError: + pass + return default + + def _write_content(self, file, timeout, value): + expiry = self.get_backend_timeout(timeout) + file.write(pickle.dumps(expiry, self.pickle_protocol)) + file.write(zlib.compress(pickle.dumps(value, self.pickle_protocol))) + + def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + self._createdir() # Cache dir can be deleted at any time. 
+ fname = self._key_to_file(key, version) + self._cull() # make some room if necessary + fd, tmp_path = tempfile.mkstemp(dir=self._dir) + renamed = False + try: + with open(fd, "wb") as f: + self._write_content(f, timeout, value) + file_move_safe(tmp_path, fname, allow_overwrite=True) + renamed = True + finally: + if not renamed: + os.remove(tmp_path) + + def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None): + try: + with open(self._key_to_file(key, version), "r+b") as f: + try: + locks.lock(f, locks.LOCK_EX) + if self._is_expired(f): + return False + else: + previous_value = pickle.loads(zlib.decompress(f.read())) + f.seek(0) + self._write_content(f, timeout, previous_value) + return True + finally: + locks.unlock(f) + except FileNotFoundError: + return False + + def delete(self, key, version=None): + return self._delete(self._key_to_file(key, version)) + + def _delete(self, fname): + if not fname.startswith(self._dir) or not os.path.exists(fname): + return False + try: + os.remove(fname) + except FileNotFoundError: + # The file may have been removed by another process. + return False + return True + + def has_key(self, key, version=None): + fname = self._key_to_file(key, version) + try: + with open(fname, "rb") as f: + return not self._is_expired(f) + except FileNotFoundError: + return False + + def _cull(self): + """ + Remove random cache entries if max_entries is reached at a ratio + of num_entries / cull_frequency. A value of 0 for CULL_FREQUENCY means + that the entire cache will be purged. 
+ """ + filelist = self._list_cache_files() + num_entries = len(filelist) + if num_entries < self._max_entries: + return # return early if no culling is required + if self._cull_frequency == 0: + return self.clear() # Clear the cache when CULL_FREQUENCY = 0 + # Delete a random selection of entries + filelist = random.sample(filelist, int(num_entries / self._cull_frequency)) + for fname in filelist: + self._delete(fname) + + def _createdir(self): + # Set the umask because os.makedirs() doesn't apply the "mode" argument + # to intermediate-level directories. + old_umask = os.umask(0o077) + try: + os.makedirs(self._dir, 0o700, exist_ok=True) + finally: + os.umask(old_umask) + + def _key_to_file(self, key, version=None): + """ + Convert a key into a cache file path. Basically this is the + root cache path joined with the md5sum of the key and a suffix. + """ + key = self.make_and_validate_key(key, version=version) + return os.path.join( + self._dir, + "".join( + [ + md5(key.encode(), usedforsecurity=False).hexdigest(), + self.cache_suffix, + ] + ), + ) + + def clear(self): + """ + Remove all the cache files. + """ + for fname in self._list_cache_files(): + self._delete(fname) + + def _is_expired(self, f): + """ + Take an open cache file `f` and delete it if it's expired. + """ + try: + exp = pickle.load(f) + except EOFError: + exp = 0 # An empty file is considered expired. + if exp is not None and exp < time.time(): + f.close() # On Windows a file has to be closed before deleting + self._delete(f.name) + return True + return False + + def _list_cache_files(self): + """ + Get a list of paths to all the cache files. These are all the files + in the root cache dir that end on the cache_suffix. 
+ """ + return [ + os.path.join(self._dir, fname) + for fname in glob.glob1(self._dir, "*%s" % self.cache_suffix) + ] diff --git a/testbed/django__django/django/core/cache/backends/locmem.py b/testbed/django__django/django/core/cache/backends/locmem.py new file mode 100644 index 0000000000000000000000000000000000000000..cbc8dba51e528a16d2f9de4545c46250a02a2287 --- /dev/null +++ b/testbed/django__django/django/core/cache/backends/locmem.py @@ -0,0 +1,117 @@ +"Thread-safe in-memory cache backend." +import pickle +import time +from collections import OrderedDict +from threading import Lock + +from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache + +# Global in-memory store of cache data. Keyed by name, to provide +# multiple named local memory caches. +_caches = {} +_expire_info = {} +_locks = {} + + +class LocMemCache(BaseCache): + pickle_protocol = pickle.HIGHEST_PROTOCOL + + def __init__(self, name, params): + super().__init__(params) + self._cache = _caches.setdefault(name, OrderedDict()) + self._expire_info = _expire_info.setdefault(name, {}) + self._lock = _locks.setdefault(name, Lock()) + + def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + key = self.make_and_validate_key(key, version=version) + pickled = pickle.dumps(value, self.pickle_protocol) + with self._lock: + if self._has_expired(key): + self._set(key, pickled, timeout) + return True + return False + + def get(self, key, default=None, version=None): + key = self.make_and_validate_key(key, version=version) + with self._lock: + if self._has_expired(key): + self._delete(key) + return default + pickled = self._cache[key] + self._cache.move_to_end(key, last=False) + return pickle.loads(pickled) + + def _set(self, key, value, timeout=DEFAULT_TIMEOUT): + if len(self._cache) >= self._max_entries: + self._cull() + self._cache[key] = value + self._cache.move_to_end(key, last=False) + self._expire_info[key] = self.get_backend_timeout(timeout) + + def set(self, key, value, 
timeout=DEFAULT_TIMEOUT, version=None): + key = self.make_and_validate_key(key, version=version) + pickled = pickle.dumps(value, self.pickle_protocol) + with self._lock: + self._set(key, pickled, timeout) + + def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None): + key = self.make_and_validate_key(key, version=version) + with self._lock: + if self._has_expired(key): + return False + self._expire_info[key] = self.get_backend_timeout(timeout) + return True + + def incr(self, key, delta=1, version=None): + key = self.make_and_validate_key(key, version=version) + with self._lock: + if self._has_expired(key): + self._delete(key) + raise ValueError("Key '%s' not found" % key) + pickled = self._cache[key] + value = pickle.loads(pickled) + new_value = value + delta + pickled = pickle.dumps(new_value, self.pickle_protocol) + self._cache[key] = pickled + self._cache.move_to_end(key, last=False) + return new_value + + def has_key(self, key, version=None): + key = self.make_and_validate_key(key, version=version) + with self._lock: + if self._has_expired(key): + self._delete(key) + return False + return True + + def _has_expired(self, key): + exp = self._expire_info.get(key, -1) + return exp is not None and exp <= time.time() + + def _cull(self): + if self._cull_frequency == 0: + self._cache.clear() + self._expire_info.clear() + else: + count = len(self._cache) // self._cull_frequency + for i in range(count): + key, _ = self._cache.popitem() + del self._expire_info[key] + + def _delete(self, key): + try: + del self._cache[key] + del self._expire_info[key] + except KeyError: + return False + return True + + def delete(self, key, version=None): + key = self.make_and_validate_key(key, version=version) + with self._lock: + return self._delete(key) + + def clear(self): + with self._lock: + self._cache.clear() + self._expire_info.clear() diff --git a/testbed/django__django/django/core/cache/backends/memcached.py b/testbed/django__django/django/core/cache/backends/memcached.py 
new file mode 100644 index 0000000000000000000000000000000000000000..6e2c76151187b215f7f5a912eb834cf09be7c0f1 --- /dev/null +++ b/testbed/django__django/django/core/cache/backends/memcached.py @@ -0,0 +1,188 @@ +"Memcached cache backend" + +import re +import time + +from django.core.cache.backends.base import ( + DEFAULT_TIMEOUT, + BaseCache, + InvalidCacheKey, + memcache_key_warnings, +) +from django.utils.functional import cached_property + + +class BaseMemcachedCache(BaseCache): + def __init__(self, server, params, library, value_not_found_exception): + super().__init__(params) + if isinstance(server, str): + self._servers = re.split("[;,]", server) + else: + self._servers = server + + # Exception type raised by the underlying client library for a + # nonexistent key. + self.LibraryValueNotFoundException = value_not_found_exception + + self._lib = library + self._class = library.Client + self._options = params.get("OPTIONS") or {} + + @property + def client_servers(self): + return self._servers + + @cached_property + def _cache(self): + """ + Implement transparent thread-safe access to a memcached client. + """ + return self._class(self.client_servers, **self._options) + + def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT): + """ + Memcached deals with long (> 30 days) timeouts in a special + way. Call this function to obtain a safe value for your timeout. + """ + if timeout == DEFAULT_TIMEOUT: + timeout = self.default_timeout + + if timeout is None: + # Using 0 in memcache sets a non-expiring timeout. + return 0 + elif int(timeout) == 0: + # Other cache backends treat 0 as set-and-expire. To achieve this + # in memcache backends, a negative timeout must be passed. + timeout = -1 + + if timeout > 2592000: # 60*60*24*30, 30 days + # See https://github.com/memcached/memcached/wiki/Programming#expiration + # "Expiration times can be set from 0, meaning "never expire", to + # 30 days. Any time higher than 30 days is interpreted as a Unix + # timestamp date. 
If you want to expire an object on January 1st of + # next year, this is how you do that." + # + # This means that we have to switch to absolute timestamps. + timeout += int(time.time()) + return int(timeout) + + def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + key = self.make_and_validate_key(key, version=version) + return self._cache.add(key, value, self.get_backend_timeout(timeout)) + + def get(self, key, default=None, version=None): + key = self.make_and_validate_key(key, version=version) + return self._cache.get(key, default) + + def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): + key = self.make_and_validate_key(key, version=version) + if not self._cache.set(key, value, self.get_backend_timeout(timeout)): + # Make sure the key doesn't keep its old value in case of failure + # to set (memcached's 1MB limit). + self._cache.delete(key) + + def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None): + key = self.make_and_validate_key(key, version=version) + return bool(self._cache.touch(key, self.get_backend_timeout(timeout))) + + def delete(self, key, version=None): + key = self.make_and_validate_key(key, version=version) + return bool(self._cache.delete(key)) + + def get_many(self, keys, version=None): + key_map = { + self.make_and_validate_key(key, version=version): key for key in keys + } + ret = self._cache.get_multi(key_map.keys()) + return {key_map[k]: v for k, v in ret.items()} + + def close(self, **kwargs): + # Many clients don't clean up connections properly. + self._cache.disconnect_all() + + def incr(self, key, delta=1, version=None): + key = self.make_and_validate_key(key, version=version) + try: + # Memcached doesn't support negative delta. + if delta < 0: + val = self._cache.decr(key, -delta) + else: + val = self._cache.incr(key, delta) + # Normalize an exception raised by the underlying client library to + # ValueError in the event of a nonexistent key when calling + # incr()/decr(). 
+ except self.LibraryValueNotFoundException: + val = None + if val is None: + raise ValueError("Key '%s' not found" % key) + return val + + def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None): + safe_data = {} + original_keys = {} + for key, value in data.items(): + safe_key = self.make_and_validate_key(key, version=version) + safe_data[safe_key] = value + original_keys[safe_key] = key + failed_keys = self._cache.set_multi( + safe_data, self.get_backend_timeout(timeout) + ) + return [original_keys[k] for k in failed_keys] + + def delete_many(self, keys, version=None): + keys = [self.make_and_validate_key(key, version=version) for key in keys] + self._cache.delete_multi(keys) + + def clear(self): + self._cache.flush_all() + + def validate_key(self, key): + for warning in memcache_key_warnings(key): + raise InvalidCacheKey(warning) + + +class PyLibMCCache(BaseMemcachedCache): + "An implementation of a cache binding using pylibmc" + + def __init__(self, server, params): + import pylibmc + + super().__init__( + server, params, library=pylibmc, value_not_found_exception=pylibmc.NotFound + ) + + @property + def client_servers(self): + output = [] + for server in self._servers: + output.append(server.removeprefix("unix:")) + return output + + def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None): + key = self.make_and_validate_key(key, version=version) + if timeout == 0: + return self._cache.delete(key) + return self._cache.touch(key, self.get_backend_timeout(timeout)) + + def close(self, **kwargs): + # libmemcached manages its own connections. Don't call disconnect_all() + # as it resets the failover state and creates unnecessary reconnects. 
class RedisSerializer:
    """
    Value serializer for the Redis cache backend.

    Plain integers are stored unpickled so Redis INCR/DECR keep working;
    every other value is pickled.
    """

    def __init__(self, protocol=None):
        # Fall back to the most efficient pickle protocol available.
        self.protocol = protocol if protocol is not None else pickle.HIGHEST_PROTOCOL

    def dumps(self, obj):
        # Only a genuine int is stored raw; bool is an int subclass but
        # must round-trip as bool, so it goes through pickle like any
        # other object.
        if type(obj) is int:
            return obj
        return pickle.dumps(obj, self.protocol)

    def loads(self, data):
        # Raw integers were stored unpickled. Pickle payloads never parse
        # as a decimal integer, so int() failing means "unpickle it".
        try:
            return int(data)
        except ValueError:
            return pickle.loads(data)
def incr(self, key, delta):
    """
    Add ``delta`` to the value stored at ``key``.

    Raise ValueError when the key does not exist, per the cache backend
    contract (a bare Redis INCR would silently create the key as 0).
    NOTE(review): the exists()/incr() pair is not atomic; a concurrent
    expiry between the two calls can still recreate the key — confirm
    this matches upstream behavior.
    """
    conn = self.get_client(key, write=True)
    if conn.exists(key):
        return conn.incr(key, delta)
    raise ValueError("Key '%s' not found." % key)
def delete_many(self, keys):
    """Remove every given key with a single DEL command on the write server."""
    conn = self.get_client(None, write=True)
    conn.delete(*keys)
TEMPLATE_FRAGMENT_KEY_TEMPLATE = "template.cache.%s.%s"


def make_template_fragment_key(fragment_name, vary_on=None):
    """
    Build the cache key used by the {% cache %} template tag.

    The key embeds the fragment name plus an MD5 digest of the vary_on
    values, each hashed as ``str(value)`` followed by a ``:`` separator.
    MD5 is used purely as a fast fingerprint, not for security.
    """
    digest = md5(usedforsecurity=False)
    for value in vary_on if vary_on is not None else ():
        digest.update(str(value).encode())
        digest.update(b":")
    return TEMPLATE_FRAGMENT_KEY_TEMPLATE % (fragment_name, digest.hexdigest())
+ Info, + Warning, +) +from .registry import Tags, register, run_checks, tag_exists + +# Import these to force registration of checks +import django.core.checks.async_checks # NOQA isort:skip +import django.core.checks.caches # NOQA isort:skip +import django.core.checks.compatibility.django_4_0 # NOQA isort:skip +import django.core.checks.database # NOQA isort:skip +import django.core.checks.files # NOQA isort:skip +import django.core.checks.model_checks # NOQA isort:skip +import django.core.checks.security.base # NOQA isort:skip +import django.core.checks.security.csrf # NOQA isort:skip +import django.core.checks.security.sessions # NOQA isort:skip +import django.core.checks.templates # NOQA isort:skip +import django.core.checks.translation # NOQA isort:skip +import django.core.checks.urls # NOQA isort:skip + + +__all__ = [ + "CheckMessage", + "Debug", + "Info", + "Warning", + "Error", + "Critical", + "DEBUG", + "INFO", + "WARNING", + "ERROR", + "CRITICAL", + "register", + "run_checks", + "tag_exists", + "Tags", +] diff --git a/testbed/django__django/django/core/checks/async_checks.py b/testbed/django__django/django/core/checks/async_checks.py new file mode 100644 index 0000000000000000000000000000000000000000..a0e01867d92f454d2ce2c15385dddc7b3bd63d80 --- /dev/null +++ b/testbed/django__django/django/core/checks/async_checks.py @@ -0,0 +1,16 @@ +import os + +from . import Error, Tags, register + +E001 = Error( + "You should not set the DJANGO_ALLOW_ASYNC_UNSAFE environment variable in " + "deployment. 
@register(Tags.async_support, deploy=True)
def check_async_unsafe(app_configs, **kwargs):
    """
    Deployment check: DJANGO_ALLOW_ASYNC_UNSAFE must not be set in
    production, since it disables async-safety protection.
    """
    return [E001] if os.environ.get("DJANGO_ALLOW_ASYNC_UNSAFE") else []
@register(Tags.caches)
def check_file_based_cache_is_absolute(app_configs, **kwargs):
    """Warn about file-based caches configured with a relative LOCATION."""
    warnings = []
    for alias, config in settings.CACHES.items():
        # Only file-based caches have a filesystem LOCATION to validate.
        if not isinstance(caches[alias], FileBasedCache):
            continue
        if pathlib.Path(config["LOCATION"]).is_absolute():
            continue
        warnings.append(
            Warning(
                f"Your '{alias}' cache LOCATION path is relative. Use an "
                f"absolute path instead.",
                id="caches.W003",
            )
        )
    return warnings
@register(Tags.database)
def check_database_backends(databases=None, **kwargs):
    """
    Run each selected database connection's backend-specific validation
    checks. With no databases selected, there is nothing to check.
    """
    if databases is None:
        return []
    messages = []
    for alias in databases:
        messages.extend(connections[alias].validation.check(**kwargs))
    return messages
@register(Tags.files)
def check_setting_file_upload_temp_dir(app_configs, **kwargs):
    """Ensure FILE_UPLOAD_TEMP_DIR, when set, names an existing directory."""
    temp_dir = getattr(settings, "FILE_UPLOAD_TEMP_DIR", None)
    if not temp_dir or Path(temp_dir).is_dir():
        return []
    return [
        Error(
            f"The FILE_UPLOAD_TEMP_DIR setting refers to the nonexistent "
            f"directory '{temp_dir}'.",
            id="files.E001",
        ),
    ]
class Debug(CheckMessage):
    """A check message with DEBUG severity."""

    def __init__(self, *args, **kwargs):
        super().__init__(DEBUG, *args, **kwargs)


class Info(CheckMessage):
    """A check message with INFO severity."""

    def __init__(self, *args, **kwargs):
        super().__init__(INFO, *args, **kwargs)


class Warning(CheckMessage):
    """A check message with WARNING severity."""

    def __init__(self, *args, **kwargs):
        super().__init__(WARNING, *args, **kwargs)


class Error(CheckMessage):
    """A check message with ERROR severity."""

    def __init__(self, *args, **kwargs):
        super().__init__(ERROR, *args, **kwargs)


class Critical(CheckMessage):
    """A check message with CRITICAL severity."""

    def __init__(self, *args, **kwargs):
        super().__init__(CRITICAL, *args, **kwargs)
chain.from_iterable( + app_config.get_models() for app_config in app_configs + ) + for model in models: + if model._meta.managed and not model._meta.proxy: + db_table_models[model._meta.db_table].append(model._meta.label) + if not inspect.ismethod(model.check): + errors.append( + Error( + "The '%s.check()' class method is currently overridden by %r." + % (model.__name__, model.check), + obj=model, + id="models.E020", + ) + ) + else: + errors.extend(model.check(**kwargs)) + for model_index in model._meta.indexes: + indexes[model_index.name].append(model._meta.label) + for model_constraint in model._meta.constraints: + constraints[model_constraint.name].append(model._meta.label) + if settings.DATABASE_ROUTERS: + error_class, error_id = Warning, "models.W035" + error_hint = ( + "You have configured settings.DATABASE_ROUTERS. Verify that %s " + "are correctly routed to separate databases." + ) + else: + error_class, error_id = Error, "models.E028" + error_hint = None + for db_table, model_labels in db_table_models.items(): + if len(model_labels) != 1: + model_labels_str = ", ".join(model_labels) + errors.append( + error_class( + "db_table '%s' is used by multiple models: %s." + % (db_table, model_labels_str), + obj=db_table, + hint=(error_hint % model_labels_str) if error_hint else None, + id=error_id, + ) + ) + for index_name, model_labels in indexes.items(): + if len(model_labels) > 1: + model_labels = set(model_labels) + errors.append( + Error( + "index name '%s' is not unique %s %s." + % ( + index_name, + "for model" if len(model_labels) == 1 else "among models:", + ", ".join(sorted(model_labels)), + ), + id="models.E029" if len(model_labels) == 1 else "models.E030", + ), + ) + for constraint_name, model_labels in constraints.items(): + if len(model_labels) > 1: + model_labels = set(model_labels) + errors.append( + Error( + "constraint name '%s' is not unique %s %s." 
+ % ( + constraint_name, + "for model" if len(model_labels) == 1 else "among models:", + ", ".join(sorted(model_labels)), + ), + id="models.E031" if len(model_labels) == 1 else "models.E032", + ), + ) + return errors + + +def _check_lazy_references(apps, ignore=None): + """ + Ensure all lazy (i.e. string) model references have been resolved. + + Lazy references are used in various places throughout Django, primarily in + related fields and model signals. Identify those common cases and provide + more helpful error messages for them. + + The ignore parameter is used by StateApps to exclude swappable models from + this check. + """ + pending_models = set(apps._pending_operations) - (ignore or set()) + + # Short circuit if there aren't any errors. + if not pending_models: + return [] + + from django.db.models import signals + + model_signals = { + signal: name + for name, signal in vars(signals).items() + if isinstance(signal, signals.ModelSignal) + } + + def extract_operation(obj): + """ + Take a callable found in Apps._pending_operations and identify the + original callable passed to Apps.lazy_model_operation(). If that + callable was a partial, return the inner, non-partial function and + any arguments and keyword arguments that were supplied with it. + + obj is a callback defined locally in Apps.lazy_model_operation() and + annotated there with a `func` attribute so as to imitate a partial. 
+ """ + operation, args, keywords = obj, [], {} + while hasattr(operation, "func"): + args.extend(getattr(operation, "args", [])) + keywords.update(getattr(operation, "keywords", {})) + operation = operation.func + return operation, args, keywords + + def app_model_error(model_key): + try: + apps.get_app_config(model_key[0]) + model_error = "app '%s' doesn't provide model '%s'" % model_key + except LookupError: + model_error = "app '%s' isn't installed" % model_key[0] + return model_error + + # Here are several functions which return CheckMessage instances for the + # most common usages of lazy operations throughout Django. These functions + # take the model that was being waited on as an (app_label, modelname) + # pair, the original lazy function, and its positional and keyword args as + # determined by extract_operation(). + + def field_error(model_key, func, args, keywords): + error_msg = ( + "The field %(field)s was declared with a lazy reference " + "to '%(model)s', but %(model_error)s." + ) + params = { + "model": ".".join(model_key), + "field": keywords["field"], + "model_error": app_model_error(model_key), + } + return Error(error_msg % params, obj=keywords["field"], id="fields.E307") + + def signal_connect_error(model_key, func, args, keywords): + error_msg = ( + "%(receiver)s was connected to the '%(signal)s' signal with a " + "lazy reference to the sender '%(model)s', but %(model_error)s." + ) + receiver = args[0] + # The receiver is either a function or an instance of class + # defining a `__call__` method. 
+ if isinstance(receiver, types.FunctionType): + description = "The function '%s'" % receiver.__name__ + elif isinstance(receiver, types.MethodType): + description = "Bound method '%s.%s'" % ( + receiver.__self__.__class__.__name__, + receiver.__name__, + ) + else: + description = "An instance of class '%s'" % receiver.__class__.__name__ + signal_name = model_signals.get(func.__self__, "unknown") + params = { + "model": ".".join(model_key), + "receiver": description, + "signal": signal_name, + "model_error": app_model_error(model_key), + } + return Error(error_msg % params, obj=receiver.__module__, id="signals.E001") + + def default_error(model_key, func, args, keywords): + error_msg = ( + "%(op)s contains a lazy reference to %(model)s, but %(model_error)s." + ) + params = { + "op": func, + "model": ".".join(model_key), + "model_error": app_model_error(model_key), + } + return Error(error_msg % params, obj=func, id="models.E022") + + # Maps common uses of lazy operations to corresponding error functions + # defined above. If a key maps to None, no error will be produced. + # default_error() will be used for usages that don't appear in this dict. 
class Tags:
    """
    Built-in tags for internal checks.

    Each attribute names a tag that check functions can be registered
    under; the check framework uses these strings to select subsets of
    checks to run.
    """

    admin = "admin"
    async_support = "async_support"
    caches = "caches"
    compatibility = "compatibility"
    database = "database"
    files = "files"
    models = "models"
    security = "security"
    signals = "signals"
    sites = "sites"
    staticfiles = "staticfiles"
    templates = "templates"
    translation = "translation"
    urls = "urls"
+ + Example:: + + registry = CheckRegistry() + @registry.register('mytag', 'anothertag') + def my_check(app_configs, **kwargs): + # ... perform checks and collect `errors` ... + return errors + # or + registry.register(my_check, 'mytag', 'anothertag') + """ + + def inner(check): + if not func_accepts_kwargs(check): + raise TypeError( + "Check functions must accept keyword arguments (**kwargs)." + ) + check.tags = tags + checks = ( + self.deployment_checks + if kwargs.get("deploy") + else self.registered_checks + ) + checks.add(check) + return check + + if callable(check): + return inner(check) + else: + if check: + tags += (check,) + return inner + + def run_checks( + self, + app_configs=None, + tags=None, + include_deployment_checks=False, + databases=None, + ): + """ + Run all registered checks and return list of Errors and Warnings. + """ + errors = [] + checks = self.get_checks(include_deployment_checks) + + if tags is not None: + checks = [check for check in checks if not set(check.tags).isdisjoint(tags)] + + for check in checks: + new_errors = check(app_configs=app_configs, databases=databases) + if not is_iterable(new_errors): + raise TypeError( + "The function %r did not return a list. All functions " + "registered with the checks registry must return a list." 
def get_checks(self, include_deployment_checks=False):
    """
    Return the registered check functions, optionally including the
    deployment-only checks.
    """
    result = list(self.registered_checks)
    if include_deployment_checks:
        result += list(self.deployment_checks)
    return result
import Error, Tags, Warning, register + +CROSS_ORIGIN_OPENER_POLICY_VALUES = { + "same-origin", + "same-origin-allow-popups", + "unsafe-none", +} +REFERRER_POLICY_VALUES = { + "no-referrer", + "no-referrer-when-downgrade", + "origin", + "origin-when-cross-origin", + "same-origin", + "strict-origin", + "strict-origin-when-cross-origin", + "unsafe-url", +} + +SECRET_KEY_INSECURE_PREFIX = "django-insecure-" +SECRET_KEY_MIN_LENGTH = 50 +SECRET_KEY_MIN_UNIQUE_CHARACTERS = 5 + +SECRET_KEY_WARNING_MSG = ( + f"Your %s has less than {SECRET_KEY_MIN_LENGTH} characters, less than " + f"{SECRET_KEY_MIN_UNIQUE_CHARACTERS} unique characters, or it's prefixed " + f"with '{SECRET_KEY_INSECURE_PREFIX}' indicating that it was generated " + f"automatically by Django. Please generate a long and random value, " + f"otherwise many of Django's security-critical features will be " + f"vulnerable to attack." +) + +W001 = Warning( + "You do not have 'django.middleware.security.SecurityMiddleware' " + "in your MIDDLEWARE so the SECURE_HSTS_SECONDS, " + "SECURE_CONTENT_TYPE_NOSNIFF, SECURE_REFERRER_POLICY, " + "SECURE_CROSS_ORIGIN_OPENER_POLICY, and SECURE_SSL_REDIRECT settings will " + "have no effect.", + id="security.W001", +) + +W002 = Warning( + "You do not have " + "'django.middleware.clickjacking.XFrameOptionsMiddleware' in your " + "MIDDLEWARE, so your pages will not be served with an " + "'x-frame-options' header. Unless there is a good reason for your " + "site to be served in a frame, you should consider enabling this " + "header to help prevent clickjacking attacks.", + id="security.W002", +) + +W004 = Warning( + "You have not set a value for the SECURE_HSTS_SECONDS setting. " + "If your entire site is served only over SSL, you may want to consider " + "setting a value and enabling HTTP Strict Transport Security. 
" + "Be sure to read the documentation first; enabling HSTS carelessly " + "can cause serious, irreversible problems.", + id="security.W004", +) + +W005 = Warning( + "You have not set the SECURE_HSTS_INCLUDE_SUBDOMAINS setting to True. " + "Without this, your site is potentially vulnerable to attack " + "via an insecure connection to a subdomain. Only set this to True if " + "you are certain that all subdomains of your domain should be served " + "exclusively via SSL.", + id="security.W005", +) + +W006 = Warning( + "Your SECURE_CONTENT_TYPE_NOSNIFF setting is not set to True, " + "so your pages will not be served with an " + "'X-Content-Type-Options: nosniff' header. " + "You should consider enabling this header to prevent the " + "browser from identifying content types incorrectly.", + id="security.W006", +) + +W008 = Warning( + "Your SECURE_SSL_REDIRECT setting is not set to True. " + "Unless your site should be available over both SSL and non-SSL " + "connections, you may want to either set this setting True " + "or configure a load balancer or reverse-proxy server " + "to redirect all connections to HTTPS.", + id="security.W008", +) + +W009 = Warning( + SECRET_KEY_WARNING_MSG % "SECRET_KEY", + id="security.W009", +) + +W018 = Warning( + "You should not have DEBUG set to True in deployment.", + id="security.W018", +) + +W019 = Warning( + "You have " + "'django.middleware.clickjacking.XFrameOptionsMiddleware' in your " + "MIDDLEWARE, but X_FRAME_OPTIONS is not set to 'DENY'. " + "Unless there is a good reason for your site to serve other parts of " + "itself in a frame, you should change it to 'DENY'.", + id="security.W019", +) + +W020 = Warning( + "ALLOWED_HOSTS must not be empty in deployment.", + id="security.W020", +) + +W021 = Warning( + "You have not set the SECURE_HSTS_PRELOAD setting to True. 
def _security_middleware():
    """Return True when SecurityMiddleware is enabled in MIDDLEWARE."""
    return "django.middleware.security.SecurityMiddleware" in settings.MIDDLEWARE


def _xframe_middleware():
    """Return True when XFrameOptionsMiddleware is enabled in MIDDLEWARE."""
    return (
        "django.middleware.clickjacking.XFrameOptionsMiddleware"
        in settings.MIDDLEWARE
    )


@register(Tags.security, deploy=True)
def check_security_middleware(app_configs, **kwargs):
    """Warn (security.W001) when SecurityMiddleware is not installed."""
    if _security_middleware():
        return []
    return [W001]


@register(Tags.security, deploy=True)
def check_xframe_options_middleware(app_configs, **kwargs):
    """Warn (security.W002) when XFrameOptionsMiddleware is not installed."""
    if _xframe_middleware():
        return []
    return [W002]


@register(Tags.security, deploy=True)
def check_sts(app_configs, **kwargs):
    """Warn (security.W004) when HSTS is off (SECURE_HSTS_SECONDS unset/zero).

    Only relevant when SecurityMiddleware is enabled, since it emits the
    Strict-Transport-Security header.
    """
    if not _security_middleware() or settings.SECURE_HSTS_SECONDS:
        return []
    return [W004]


@register(Tags.security, deploy=True)
def check_sts_include_subdomains(app_configs, **kwargs):
    """Warn (security.W005) when HSTS is on but subdomains are excluded."""
    if (
        not _security_middleware()
        or not settings.SECURE_HSTS_SECONDS
        or settings.SECURE_HSTS_INCLUDE_SUBDOMAINS is True
    ):
        return []
    return [W005]
def _check_secret_key(secret_key):
    """Return True when ``secret_key`` is long, varied, and not auto-generated.

    The key must have at least SECRET_KEY_MIN_UNIQUE_CHARACTERS distinct
    characters, be at least SECRET_KEY_MIN_LENGTH long, and not start with
    the SECRET_KEY_INSECURE_PREFIX that startproject emits.
    """
    has_enough_unique = len(set(secret_key)) >= SECRET_KEY_MIN_UNIQUE_CHARACTERS
    is_long_enough = len(secret_key) >= SECRET_KEY_MIN_LENGTH
    is_not_autogenerated = not secret_key.startswith(SECRET_KEY_INSECURE_PREFIX)
    return has_enough_unique and is_long_enough and is_not_autogenerated


@register(Tags.security, deploy=True)
def check_secret_key(app_configs, **kwargs):
    """Warn (security.W009) when SECRET_KEY is missing or weak."""
    try:
        key = settings.SECRET_KEY
    except (ImproperlyConfigured, AttributeError):
        # An unset SECRET_KEY is treated the same as a weak one.
        return [W009]
    return [] if _check_secret_key(key) else [W009]


@register(Tags.security, deploy=True)
def check_secret_key_fallbacks(app_configs, **kwargs):
    """Warn (security.W025) for a missing SECRET_KEY_FALLBACKS setting or for
    any individual fallback key that fails the strength requirements."""
    try:
        fallbacks = settings.SECRET_KEY_FALLBACKS
    except (ImproperlyConfigured, AttributeError):
        return [Warning(W025.msg % "SECRET_KEY_FALLBACKS", id=W025.id)]
    return [
        Warning(W025.msg % f"SECRET_KEY_FALLBACKS[{index}]", id=W025.id)
        for index, key in enumerate(fallbacks)
        if not _check_secret_key(key)
    ]
@register(Tags.security, deploy=True)
def check_allowed_hosts(app_configs, **kwargs):
    """Warn (security.W020) when ALLOWED_HOSTS is empty in deployment."""
    if settings.ALLOWED_HOSTS:
        return []
    return [W020]


@register(Tags.security, deploy=True)
def check_referrer_policy(app_configs, **kwargs):
    """Warn (security.W022) when SECURE_REFERRER_POLICY is unset, and error
    (security.E023) when it contains a value outside REFERRER_POLICY_VALUES.

    Only runs when SecurityMiddleware is enabled, since it sends the header.
    """
    if not _security_middleware():
        return []
    policy = settings.SECURE_REFERRER_POLICY
    if policy is None:
        return [W022]
    # A comma-separated string or any iterable of values is accepted so that
    # several fallback policies can be sent.
    if isinstance(policy, str):
        values = {part.strip() for part in policy.split(",")}
    else:
        values = set(policy)
    return [] if values <= REFERRER_POLICY_VALUES else [E023]


@register(Tags.security, deploy=True)
def check_cross_origin_opener_policy(app_configs, **kwargs):
    """Error (security.E024) for an invalid SECURE_CROSS_ORIGIN_OPENER_POLICY.

    None is allowed and disables the header entirely.
    """
    if not _security_middleware():
        return []
    policy = settings.SECURE_CROSS_ORIGIN_OPENER_POLICY
    if policy is None or policy in CROSS_ORIGIN_OPENER_POLICY_VALUES:
        return []
    return [E024]
" + "Using a secure-only CSRF cookie makes it more difficult for network " + "traffic sniffers to steal the CSRF token.", + id="security.W016", +) + + +def _csrf_middleware(): + return "django.middleware.csrf.CsrfViewMiddleware" in settings.MIDDLEWARE + + +@register(Tags.security, deploy=True) +def check_csrf_middleware(app_configs, **kwargs): + passed_check = _csrf_middleware() + return [] if passed_check else [W003] + + +@register(Tags.security, deploy=True) +def check_csrf_cookie_secure(app_configs, **kwargs): + passed_check = ( + settings.CSRF_USE_SESSIONS + or not _csrf_middleware() + or settings.CSRF_COOKIE_SECURE is True + ) + return [] if passed_check else [W016] + + +@register(Tags.security) +def check_csrf_failure_view(app_configs, **kwargs): + from django.middleware.csrf import _get_failure_view + + errors = [] + try: + view = _get_failure_view() + except ImportError: + msg = ( + "The CSRF failure view '%s' could not be imported." + % settings.CSRF_FAILURE_VIEW + ) + errors.append(Error(msg, id="security.E102")) + else: + try: + inspect.signature(view).bind(None, reason=None) + except TypeError: + msg = ( + "The CSRF failure view '%s' does not take the correct number of " + "arguments." % settings.CSRF_FAILURE_VIEW + ) + errors.append(Error(msg, id="security.E101")) + return errors diff --git a/testbed/django__django/django/core/checks/security/sessions.py b/testbed/django__django/django/core/checks/security/sessions.py new file mode 100644 index 0000000000000000000000000000000000000000..f317d09f731d2bc2f82dcfdaa7d5d9b82302e10e --- /dev/null +++ b/testbed/django__django/django/core/checks/security/sessions.py @@ -0,0 +1,99 @@ +from django.conf import settings + +from .. import Tags, Warning, register + + +def add_session_cookie_message(message): + return message + ( + " Using a secure-only session cookie makes it more difficult for " + "network traffic sniffers to hijack user sessions." 
def add_httponly_message(message):
    """Append the shared explanation of why HttpOnly session cookies matter."""
    suffix = (
        " Using an HttpOnly session cookie makes it more difficult for "
        "cross-site scripting attacks to hijack user sessions."
    )
    return message + suffix
@register(Tags.security, deploy=True)
def check_session_cookie_secure(app_configs, **kwargs):
    """Warn when SESSION_COOKIE_SECURE is not True.

    Emits W010 (sessions app installed), W011 (session middleware enabled),
    or the combined W012 when both apply.
    """
    if settings.SESSION_COOKIE_SECURE is True:
        return []
    found = []
    if _session_app():
        found.append(W010)
    if _session_middleware():
        found.append(W011)
    # Both the app and the middleware are in use: collapse to the generic W012.
    return [W012] if len(found) > 1 else found


@register(Tags.security, deploy=True)
def check_session_cookie_httponly(app_configs, **kwargs):
    """Warn when SESSION_COOKIE_HTTPONLY is not True.

    Emits W013 (sessions app installed), W014 (session middleware enabled),
    or the combined W015 when both apply.
    """
    if settings.SESSION_COOKIE_HTTPONLY is True:
        return []
    found = []
    if _session_app():
        found.append(W013)
    if _session_middleware():
        found.append(W014)
    return [W015] if len(found) > 1 else found


def _session_middleware():
    """Return True when SessionMiddleware is enabled in MIDDLEWARE."""
    return (
        "django.contrib.sessions.middleware.SessionMiddleware"
        in settings.MIDDLEWARE
    )


def _session_app():
    """Return True when the sessions app is in INSTALLED_APPS."""
    return "django.contrib.sessions" in settings.INSTALLED_APPS
@register(Tags.templates)
def check_setting_app_dirs_loaders(app_configs, **kwargs):
    """Error (templates.E001) when a TEMPLATES entry sets both APP_DIRS and
    a 'loaders' option — the two are mutually exclusive."""
    for conf in settings.TEMPLATES:
        if conf.get("APP_DIRS") and "loaders" in conf.get("OPTIONS", {}):
            return [E001]
    return []


@register(Tags.templates)
def check_string_if_invalid_is_string(app_configs, **kwargs):
    """Error (templates.E002) for each TEMPLATES entry whose
    'string_if_invalid' option is not a string."""
    errors = []
    for conf in settings.TEMPLATES:
        value = conf.get("OPTIONS", {}).get("string_if_invalid", "")
        if isinstance(value, str):
            continue
        # Copy the template error so the shared E002 object stays pristine.
        error = copy.copy(E002)
        error.msg = error.msg.format(value, type(value).__name__)
        errors.append(error)
    return errors


@register(Tags.templates)
def check_for_template_tags_with_the_same_name(app_configs, **kwargs):
    """Warn (id templates.E003) when one template tag module name resolves to
    several distinct module paths, which makes {% load %} ambiguous."""
    libraries = defaultdict(set)

    # Custom libraries declared in TEMPLATES OPTIONS.
    for conf in settings.TEMPLATES:
        custom = conf.get("OPTIONS", {}).get("libraries", {})
        for module_name, module_path in custom.items():
            libraries[module_name].add(module_path)

    # Libraries auto-discovered from installed apps' templatetags packages.
    for module_name, module_path in get_template_tag_modules():
        libraries[module_name].add(module_path)

    return [
        Warning(
            W003.msg.format(
                repr(library_name),
                ", ".join(repr(item) for item in sorted(items)),
            ),
            id=W003.id,
        )
        for library_name, items in libraries.items()
        if len(items) > 1
    ]
@register(Tags.translation)
def check_setting_language_code(app_configs, **kwargs):
    """Error (translation.E001) when LANGUAGE_CODE is not a valid tag."""
    tag = settings.LANGUAGE_CODE
    if isinstance(tag, str) and language_code_re.match(tag):
        return []
    return [Error(E001.msg.format(tag), id=E001.id)]


@register(Tags.translation)
def check_setting_languages(app_configs, **kwargs):
    """Error (translation.E002) for each invalid code in LANGUAGES."""
    errors = []
    for tag, _name in settings.LANGUAGES:
        if not (isinstance(tag, str) and language_code_re.match(tag)):
            errors.append(Error(E002.msg.format(tag), id=E002.id))
    return errors


@register(Tags.translation)
def check_setting_languages_bidi(app_configs, **kwargs):
    """Error (translation.E003) for each invalid code in LANGUAGES_BIDI."""
    errors = []
    for tag in settings.LANGUAGES_BIDI:
        if not (isinstance(tag, str) and language_code_re.match(tag)):
            errors.append(Error(E003.msg.format(tag), id=E003.id))
    return errors


@register(Tags.translation)
def check_language_settings_consistent(app_configs, **kwargs):
    """Error (translation.E004) when no supported variant of LANGUAGE_CODE
    can be resolved against the LANGUAGES setting."""
    try:
        get_supported_language_variant(settings.LANGUAGE_CODE)
    except LookupError:
        return [E004]
    return []
b/testbed/django__django/django/core/checks/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..34eff9671de9af7fa7e18b13743acce9a5a0a400 --- /dev/null +++ b/testbed/django__django/django/core/checks/urls.py @@ -0,0 +1,117 @@ +from collections import Counter + +from django.conf import settings + +from . import Error, Tags, Warning, register + + +@register(Tags.urls) +def check_url_config(app_configs, **kwargs): + if getattr(settings, "ROOT_URLCONF", None): + from django.urls import get_resolver + + resolver = get_resolver() + return check_resolver(resolver) + return [] + + +def check_resolver(resolver): + """ + Recursively check the resolver. + """ + check_method = getattr(resolver, "check", None) + if check_method is not None: + return check_method() + elif not hasattr(resolver, "resolve"): + return get_warning_for_invalid_pattern(resolver) + else: + return [] + + +@register(Tags.urls) +def check_url_namespaces_unique(app_configs, **kwargs): + """ + Warn if URL namespaces used in applications aren't unique. + """ + if not getattr(settings, "ROOT_URLCONF", None): + return [] + + from django.urls import get_resolver + + resolver = get_resolver() + all_namespaces = _load_all_namespaces(resolver) + counter = Counter(all_namespaces) + non_unique_namespaces = [n for n, count in counter.items() if count > 1] + errors = [] + for namespace in non_unique_namespaces: + errors.append( + Warning( + "URL namespace '{}' isn't unique. You may not be able to reverse " + "all URLs in this namespace".format(namespace), + id="urls.W005", + ) + ) + return errors + + +def _load_all_namespaces(resolver, parents=()): + """ + Recursively load all namespaces from URL patterns. 
+ """ + url_patterns = getattr(resolver, "url_patterns", []) + namespaces = [ + ":".join(parents + (url.namespace,)) + for url in url_patterns + if getattr(url, "namespace", None) is not None + ] + for pattern in url_patterns: + namespace = getattr(pattern, "namespace", None) + current = parents + if namespace is not None: + current += (namespace,) + namespaces.extend(_load_all_namespaces(pattern, current)) + return namespaces + + +def get_warning_for_invalid_pattern(pattern): + """ + Return a list containing a warning that the pattern is invalid. + + describe_pattern() cannot be used here, because we cannot rely on the + urlpattern having regex or name attributes. + """ + if isinstance(pattern, str): + hint = ( + "Try removing the string '{}'. The list of urlpatterns should not " + "have a prefix string as the first element.".format(pattern) + ) + elif isinstance(pattern, tuple): + hint = "Try using path() instead of a tuple." + else: + hint = None + + return [ + Error( + "Your URL pattern {!r} is invalid. 
@register(Tags.urls)
def check_url_settings(app_configs, **kwargs):
    """Error (urls.E006) when STATIC_URL or MEDIA_URL is set without a
    trailing slash."""
    errors = []
    for name in ("STATIC_URL", "MEDIA_URL"):
        value = getattr(settings, name)
        # Empty/None values are allowed; only a non-empty value must end in "/".
        if value and not value.endswith("/"):
            errors.append(E006(name))
    return errors


def E006(name):
    """Build the urls.E006 error for the setting called ``name``."""
    return Error(
        "The {} setting must end with a slash.".format(name),
        id="urls.E006",
    )
os.path.getsize(self.file.name) + except (OSError, TypeError): + pass + if hasattr(self.file, "tell") and hasattr(self.file, "seek"): + pos = self.file.tell() + self.file.seek(0, os.SEEK_END) + size = self.file.tell() + self.file.seek(pos) + return size + raise AttributeError("Unable to determine the file's size.") + + def chunks(self, chunk_size=None): + """ + Read the file and yield chunks of ``chunk_size`` bytes (defaults to + ``File.DEFAULT_CHUNK_SIZE``). + """ + chunk_size = chunk_size or self.DEFAULT_CHUNK_SIZE + try: + self.seek(0) + except (AttributeError, UnsupportedOperation): + pass + + while True: + data = self.read(chunk_size) + if not data: + break + yield data + + def multiple_chunks(self, chunk_size=None): + """ + Return ``True`` if you can expect multiple chunks. + + NB: If a particular file representation is in memory, subclasses should + always return ``False`` -- there's no good reason to read from memory in + chunks. + """ + return self.size > (chunk_size or self.DEFAULT_CHUNK_SIZE) + + def __iter__(self): + # Iterate over this file-like object by newlines + buffer_ = None + for chunk in self.chunks(): + for line in chunk.splitlines(True): + if buffer_: + if endswith_cr(buffer_) and not equals_lf(line): + # Line split after a \r newline; yield buffer_. + yield buffer_ + # Continue with line. + else: + # Line either split without a newline (line + # continues after buffer_) or with \r\n + # newline (line == b'\n'). + line = buffer_ + line + # buffer_ handled, clear it. + buffer_ = None + + # If this is the end of a \n or \r\n line, yield. 
def endswith_cr(line):
    """Return True if ``line`` (a str or bytes) ends with a carriage return."""
    suffix = "\r" if isinstance(line, str) else b"\r"
    return line.endswith(suffix)


def endswith_lf(line):
    """Return True if ``line`` (a str or bytes) ends with a line feed."""
    suffix = "\n" if isinstance(line, str) else b"\n"
    return line.endswith(suffix)


def equals_lf(line):
    """Return True if ``line`` (a str or bytes) is exactly a line feed."""
    newline = "\n" if isinstance(line, str) else b"\n"
    return line == newline
+""" +import struct +import zlib + +from django.core.files import File + + +class ImageFile(File): + """ + A mixin for use alongside django.core.files.base.File, which provides + additional features for dealing with images. + """ + + @property + def width(self): + return self._get_image_dimensions()[0] + + @property + def height(self): + return self._get_image_dimensions()[1] + + def _get_image_dimensions(self): + if not hasattr(self, "_dimensions_cache"): + close = self.closed + self.open() + self._dimensions_cache = get_image_dimensions(self, close=close) + return self._dimensions_cache + + +def get_image_dimensions(file_or_path, close=False): + """ + Return the (width, height) of an image, given an open file or a path. Set + 'close' to True to close the file at the end if it is initially in an open + state. + """ + from PIL import ImageFile as PillowImageFile + + p = PillowImageFile.Parser() + if hasattr(file_or_path, "read"): + file = file_or_path + file_pos = file.tell() + file.seek(0) + else: + try: + file = open(file_or_path, "rb") + except OSError: + return (None, None) + close = True + try: + # Most of the time Pillow only needs a small chunk to parse the image + # and get the dimensions, but with some TIFF files Pillow needs to + # parse the whole file. + chunk_size = 1024 + while 1: + data = file.read(chunk_size) + if not data: + break + try: + p.feed(data) + except zlib.error as e: + # ignore zlib complaining on truncated stream, just feed more + # data to parser (ticket #19457). + if e.args[0].startswith("Error -5"): + pass + else: + raise + except struct.error: + # Ignore PIL failing on a too short buffer when reads return + # less bytes than expected. Skip and feed more data to the + # parser (ticket #24544). + pass + except RuntimeError: + # e.g. "RuntimeError: could not create decoder object" for + # WebP files. A different chunk_size may work. 
+ pass + if p.image: + return p.image.size + chunk_size *= 2 + return (None, None) + finally: + if close: + file.close() + else: + file.seek(file_pos) diff --git a/testbed/django__django/django/core/files/locks.py b/testbed/django__django/django/core/files/locks.py new file mode 100644 index 0000000000000000000000000000000000000000..c0f471f87da5d6bf584fca89623d9a1888f5727d --- /dev/null +++ b/testbed/django__django/django/core/files/locks.py @@ -0,0 +1,127 @@ +""" +Portable file locking utilities. + +Based partially on an example by Jonathan Feignberg in the Python +Cookbook [1] (licensed under the Python Software License) and a ctypes port by +Anatoly Techtonik for Roundup [2] (license [3]). + +[1] https://code.activestate.com/recipes/65203/ +[2] https://sourceforge.net/p/roundup/code/ci/default/tree/roundup/backends/portalocker.py # NOQA +[3] https://sourceforge.net/p/roundup/code/ci/default/tree/COPYING.txt + +Example Usage:: + + >>> from django.core.files import locks + >>> with open('./file', 'wb') as f: + ... locks.lock(f, locks.LOCK_EX) + ... 
f.write('Django') +""" +import os + +__all__ = ("LOCK_EX", "LOCK_SH", "LOCK_NB", "lock", "unlock") + + +def _fd(f): + """Get a filedescriptor from something which could be a file or an fd.""" + return f.fileno() if hasattr(f, "fileno") else f + + +if os.name == "nt": + import msvcrt + from ctypes import ( + POINTER, + Structure, + Union, + WinDLL, + byref, + c_int64, + c_ulong, + c_void_p, + sizeof, + ) + from ctypes.wintypes import BOOL, DWORD, HANDLE + + LOCK_SH = 0 # the default + LOCK_NB = 0x1 # LOCKFILE_FAIL_IMMEDIATELY + LOCK_EX = 0x2 # LOCKFILE_EXCLUSIVE_LOCK + + # --- Adapted from the pyserial project --- + # detect size of ULONG_PTR + if sizeof(c_ulong) != sizeof(c_void_p): + ULONG_PTR = c_int64 + else: + ULONG_PTR = c_ulong + PVOID = c_void_p + + # --- Union inside Structure by stackoverflow:3480240 --- + class _OFFSET(Structure): + _fields_ = [("Offset", DWORD), ("OffsetHigh", DWORD)] + + class _OFFSET_UNION(Union): + _anonymous_ = ["_offset"] + _fields_ = [("_offset", _OFFSET), ("Pointer", PVOID)] + + class OVERLAPPED(Structure): + _anonymous_ = ["_offset_union"] + _fields_ = [ + ("Internal", ULONG_PTR), + ("InternalHigh", ULONG_PTR), + ("_offset_union", _OFFSET_UNION), + ("hEvent", HANDLE), + ] + + LPOVERLAPPED = POINTER(OVERLAPPED) + + # --- Define function prototypes for extra safety --- + kernel32 = WinDLL("kernel32") + LockFileEx = kernel32.LockFileEx + LockFileEx.restype = BOOL + LockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, DWORD, LPOVERLAPPED] + UnlockFileEx = kernel32.UnlockFileEx + UnlockFileEx.restype = BOOL + UnlockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, LPOVERLAPPED] + + def lock(f, flags): + hfile = msvcrt.get_osfhandle(_fd(f)) + overlapped = OVERLAPPED() + ret = LockFileEx(hfile, flags, 0, 0, 0xFFFF0000, byref(overlapped)) + return bool(ret) + + def unlock(f): + hfile = msvcrt.get_osfhandle(_fd(f)) + overlapped = OVERLAPPED() + ret = UnlockFileEx(hfile, 0, 0, 0xFFFF0000, byref(overlapped)) + return bool(ret) + +else: + try: 
+ import fcntl + + LOCK_SH = fcntl.LOCK_SH # shared lock + LOCK_NB = fcntl.LOCK_NB # non-blocking + LOCK_EX = fcntl.LOCK_EX + except (ImportError, AttributeError): + # File locking is not supported. + LOCK_EX = LOCK_SH = LOCK_NB = 0 + + # Dummy functions that don't do anything. + def lock(f, flags): + # File is not locked + return False + + def unlock(f): + # File is unlocked + return True + + else: + + def lock(f, flags): + try: + fcntl.flock(_fd(f), flags) + return True + except BlockingIOError: + return False + + def unlock(f): + fcntl.flock(_fd(f), fcntl.LOCK_UN) + return True diff --git a/testbed/django__django/django/core/files/move.py b/testbed/django__django/django/core/files/move.py new file mode 100644 index 0000000000000000000000000000000000000000..95d69f9d944c898625464d4735695b76b3751743 --- /dev/null +++ b/testbed/django__django/django/core/files/move.py @@ -0,0 +1,102 @@ +""" +Move a file in the safest way possible:: + + >>> from django.core.files.move import file_move_safe + >>> file_move_safe("/tmp/old_file", "/tmp/new_file") +""" + +import os +from shutil import copymode, copystat + +from django.core.files import locks + +__all__ = ["file_move_safe"] + + +def _samefile(src, dst): + # Macintosh, Unix. + if hasattr(os.path, "samefile"): + try: + return os.path.samefile(src, dst) + except OSError: + return False + + # All other platforms: check for same pathname. + return os.path.normcase(os.path.abspath(src)) == os.path.normcase( + os.path.abspath(dst) + ) + + +def file_move_safe( + old_file_name, new_file_name, chunk_size=1024 * 64, allow_overwrite=False +): + """ + Move a file from one location to another in the safest way possible. + + First, try ``os.rename``, which is simple but will break across filesystems. + If that fails, stream manually from one file to another in pure Python. + + If the destination file exists and ``allow_overwrite`` is ``False``, raise + ``FileExistsError``. + """ + # There's no reason to move if we don't have to. 
+ if _samefile(old_file_name, new_file_name): + return + + try: + if not allow_overwrite and os.access(new_file_name, os.F_OK): + raise FileExistsError( + "Destination file %s exists and allow_overwrite is False." + % new_file_name + ) + + os.rename(old_file_name, new_file_name) + return + except OSError: + # OSError happens with os.rename() if moving to another filesystem or + # when moving opened files on certain operating systems. + pass + + # first open the old file, so that it won't go away + with open(old_file_name, "rb") as old_file: + # now open the new file, not forgetting allow_overwrite + fd = os.open( + new_file_name, + ( + os.O_WRONLY + | os.O_CREAT + | getattr(os, "O_BINARY", 0) + | (os.O_EXCL if not allow_overwrite else 0) + ), + ) + try: + locks.lock(fd, locks.LOCK_EX) + current_chunk = None + while current_chunk != b"": + current_chunk = old_file.read(chunk_size) + os.write(fd, current_chunk) + finally: + locks.unlock(fd) + os.close(fd) + + try: + copystat(old_file_name, new_file_name) + except PermissionError: + # Certain filesystems (e.g. CIFS) fail to copy the file's metadata if + # the type of the destination filesystem isn't the same as the source + # filesystem. This also happens with some SELinux-enabled systems. + # Ignore that, but try to set basic permissions. + try: + copymode(old_file_name, new_file_name) + except PermissionError: + pass + + try: + os.remove(old_file_name) + except PermissionError as e: + # Certain operating systems (Cygwin and Windows) + # fail when deleting opened files, ignore it. (For the + # systems where this happens, temporary files will be auto-deleted + # on close anyway.) 
+ if getattr(e, "winerror", 0) != 32: + raise diff --git a/testbed/django__django/django/core/files/storage/__init__.py b/testbed/django__django/django/core/files/storage/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..267f71ba376e66d4fcb649dff15bd16490215ab9 --- /dev/null +++ b/testbed/django__django/django/core/files/storage/__init__.py @@ -0,0 +1,42 @@ +import warnings + +from django.conf import DEFAULT_STORAGE_ALIAS, settings +from django.utils.deprecation import RemovedInDjango51Warning +from django.utils.functional import LazyObject +from django.utils.module_loading import import_string + +from .base import Storage +from .filesystem import FileSystemStorage +from .handler import InvalidStorageError, StorageHandler +from .memory import InMemoryStorage + +__all__ = ( + "FileSystemStorage", + "InMemoryStorage", + "Storage", + "DefaultStorage", + "default_storage", + "get_storage_class", + "InvalidStorageError", + "StorageHandler", + "storages", +) + +GET_STORAGE_CLASS_DEPRECATED_MSG = ( + "django.core.files.storage.get_storage_class is deprecated in favor of " + "using django.core.files.storage.storages." 
+) + + +def get_storage_class(import_path=None): + warnings.warn(GET_STORAGE_CLASS_DEPRECATED_MSG, RemovedInDjango51Warning) + return import_string(import_path or settings.DEFAULT_FILE_STORAGE) + + +class DefaultStorage(LazyObject): + def _setup(self): + self._wrapped = storages[DEFAULT_STORAGE_ALIAS] + + +storages = StorageHandler() +default_storage = DefaultStorage() diff --git a/testbed/django__django/django/core/files/storage/base.py b/testbed/django__django/django/core/files/storage/base.py new file mode 100644 index 0000000000000000000000000000000000000000..16ac22f70a8d245794565d50b8ce118d8020b471 --- /dev/null +++ b/testbed/django__django/django/core/files/storage/base.py @@ -0,0 +1,190 @@ +import os +import pathlib + +from django.core.exceptions import SuspiciousFileOperation +from django.core.files import File +from django.core.files.utils import validate_file_name +from django.utils.crypto import get_random_string +from django.utils.text import get_valid_filename + + +class Storage: + """ + A base storage class, providing some default behaviors that all other + storage systems can inherit or override, as necessary. + """ + + # The following methods represent a public interface to private methods. + # These shouldn't be overridden by subclasses unless absolutely necessary. + + def open(self, name, mode="rb"): + """Retrieve the specified file from storage.""" + return self._open(name, mode) + + def save(self, name, content, max_length=None): + """ + Save new content to the file specified by name. The content should be + a proper File object or any Python file-like object, ready to be read + from the beginning. + """ + # Get the proper name for the file, as it will actually be saved. 
+ if name is None: + name = content.name + + if not hasattr(content, "chunks"): + content = File(content, name) + + name = self.get_available_name(name, max_length=max_length) + name = self._save(name, content) + # Ensure that the name returned from the storage system is still valid. + validate_file_name(name, allow_relative_path=True) + return name + + # These methods are part of the public API, with default implementations. + + def get_valid_name(self, name): + """ + Return a filename, based on the provided filename, that's suitable for + use in the target storage system. + """ + return get_valid_filename(name) + + def get_alternative_name(self, file_root, file_ext): + """ + Return an alternative filename, by adding an underscore and a random 7 + character alphanumeric string (before the file extension, if one + exists) to the filename. + """ + return "%s_%s%s" % (file_root, get_random_string(7), file_ext) + + def get_available_name(self, name, max_length=None): + """ + Return a filename that's free on the target storage system and + available for new content to be written to. + """ + name = str(name).replace("\\", "/") + dir_name, file_name = os.path.split(name) + if ".." in pathlib.PurePath(dir_name).parts: + raise SuspiciousFileOperation( + "Detected path traversal attempt in '%s'" % dir_name + ) + validate_file_name(file_name) + file_root, file_ext = os.path.splitext(file_name) + # If the filename already exists, generate an alternative filename + # until it doesn't exist. + # Truncate original name if required, so the new filename does not + # exceed the max_length. + while self.exists(name) or (max_length and len(name) > max_length): + # file_ext includes the dot. + name = os.path.join( + dir_name, self.get_alternative_name(file_root, file_ext) + ) + if max_length is None: + continue + # Truncate file_root if max_length exceeded. 
+ truncation = len(name) - max_length + if truncation > 0: + file_root = file_root[:-truncation] + # Entire file_root was truncated in attempt to find an + # available filename. + if not file_root: + raise SuspiciousFileOperation( + 'Storage can not find an available filename for "%s". ' + "Please make sure that the corresponding file field " + 'allows sufficient "max_length".' % name + ) + name = os.path.join( + dir_name, self.get_alternative_name(file_root, file_ext) + ) + return name + + def generate_filename(self, filename): + """ + Validate the filename by calling get_valid_name() and return a filename + to be passed to the save() method. + """ + filename = str(filename).replace("\\", "/") + # `filename` may include a path as returned by FileField.upload_to. + dirname, filename = os.path.split(filename) + if ".." in pathlib.PurePath(dirname).parts: + raise SuspiciousFileOperation( + "Detected path traversal attempt in '%s'" % dirname + ) + return os.path.normpath(os.path.join(dirname, self.get_valid_name(filename))) + + def path(self, name): + """ + Return a local filesystem path where the file can be retrieved using + Python's built-in open() function. Storage systems that can't be + accessed using open() should *not* implement this method. + """ + raise NotImplementedError("This backend doesn't support absolute paths.") + + # The following methods form the public API for storage systems, but with + # no default implementations. Subclasses must implement *all* of these. + + def delete(self, name): + """ + Delete the specified file from the storage system. + """ + raise NotImplementedError( + "subclasses of Storage must provide a delete() method" + ) + + def exists(self, name): + """ + Return True if a file referenced by the given name already exists in the + storage system, or False if the name is available for a new file. 
+ """ + raise NotImplementedError( + "subclasses of Storage must provide an exists() method" + ) + + def listdir(self, path): + """ + List the contents of the specified path. Return a 2-tuple of lists: + the first item being directories, the second item being files. + """ + raise NotImplementedError( + "subclasses of Storage must provide a listdir() method" + ) + + def size(self, name): + """ + Return the total size, in bytes, of the file specified by name. + """ + raise NotImplementedError("subclasses of Storage must provide a size() method") + + def url(self, name): + """ + Return an absolute URL where the file's contents can be accessed + directly by a web browser. + """ + raise NotImplementedError("subclasses of Storage must provide a url() method") + + def get_accessed_time(self, name): + """ + Return the last accessed time (as a datetime) of the file specified by + name. The datetime will be timezone-aware if USE_TZ=True. + """ + raise NotImplementedError( + "subclasses of Storage must provide a get_accessed_time() method" + ) + + def get_created_time(self, name): + """ + Return the creation time (as a datetime) of the file specified by name. + The datetime will be timezone-aware if USE_TZ=True. + """ + raise NotImplementedError( + "subclasses of Storage must provide a get_created_time() method" + ) + + def get_modified_time(self, name): + """ + Return the last modified time (as a datetime) of the file specified by + name. The datetime will be timezone-aware if USE_TZ=True. 
+ """ + raise NotImplementedError( + "subclasses of Storage must provide a get_modified_time() method" + ) diff --git a/testbed/django__django/django/core/files/storage/filesystem.py b/testbed/django__django/django/core/files/storage/filesystem.py new file mode 100644 index 0000000000000000000000000000000000000000..85fc4eff9f3a53b25f8dbc44f4c982b6b95730b0 --- /dev/null +++ b/testbed/django__django/django/core/files/storage/filesystem.py @@ -0,0 +1,207 @@ +import os +from datetime import datetime, timezone +from urllib.parse import urljoin + +from django.conf import settings +from django.core.files import File, locks +from django.core.files.move import file_move_safe +from django.core.signals import setting_changed +from django.utils._os import safe_join +from django.utils.deconstruct import deconstructible +from django.utils.encoding import filepath_to_uri +from django.utils.functional import cached_property + +from .base import Storage +from .mixins import StorageSettingsMixin + + +@deconstructible(path="django.core.files.storage.FileSystemStorage") +class FileSystemStorage(Storage, StorageSettingsMixin): + """ + Standard filesystem storage + """ + + # The combination of O_CREAT and O_EXCL makes os.open() raise OSError if + # the file already exists before it's opened. 
+ OS_OPEN_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL | getattr(os, "O_BINARY", 0) + + def __init__( + self, + location=None, + base_url=None, + file_permissions_mode=None, + directory_permissions_mode=None, + ): + self._location = location + self._base_url = base_url + self._file_permissions_mode = file_permissions_mode + self._directory_permissions_mode = directory_permissions_mode + setting_changed.connect(self._clear_cached_properties) + + @cached_property + def base_location(self): + return self._value_or_setting(self._location, settings.MEDIA_ROOT) + + @cached_property + def location(self): + return os.path.abspath(self.base_location) + + @cached_property + def base_url(self): + if self._base_url is not None and not self._base_url.endswith("/"): + self._base_url += "/" + return self._value_or_setting(self._base_url, settings.MEDIA_URL) + + @cached_property + def file_permissions_mode(self): + return self._value_or_setting( + self._file_permissions_mode, settings.FILE_UPLOAD_PERMISSIONS + ) + + @cached_property + def directory_permissions_mode(self): + return self._value_or_setting( + self._directory_permissions_mode, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS + ) + + def _open(self, name, mode="rb"): + return File(open(self.path(name), mode)) + + def _save(self, name, content): + full_path = self.path(name) + + # Create any intermediate directories that do not exist. + directory = os.path.dirname(full_path) + try: + if self.directory_permissions_mode is not None: + # Set the umask because os.makedirs() doesn't apply the "mode" + # argument to intermediate-level directories. + old_umask = os.umask(0o777 & ~self.directory_permissions_mode) + try: + os.makedirs( + directory, self.directory_permissions_mode, exist_ok=True + ) + finally: + os.umask(old_umask) + else: + os.makedirs(directory, exist_ok=True) + except FileExistsError: + raise FileExistsError("%s exists and is not a directory." 
% directory) + + # There's a potential race condition between get_available_name and + # saving the file; it's possible that two threads might return the + # same name, at which point all sorts of fun happens. So we need to + # try to create the file, but if it already exists we have to go back + # to get_available_name() and try again. + + while True: + try: + # This file has a file path that we can move. + if hasattr(content, "temporary_file_path"): + file_move_safe(content.temporary_file_path(), full_path) + + # This is a normal uploadedfile that we can stream. + else: + # The current umask value is masked out by os.open! + fd = os.open(full_path, self.OS_OPEN_FLAGS, 0o666) + _file = None + try: + locks.lock(fd, locks.LOCK_EX) + for chunk in content.chunks(): + if _file is None: + mode = "wb" if isinstance(chunk, bytes) else "wt" + _file = os.fdopen(fd, mode) + _file.write(chunk) + finally: + locks.unlock(fd) + if _file is not None: + _file.close() + else: + os.close(fd) + except FileExistsError: + # A new name is needed if the file exists. + name = self.get_available_name(name) + full_path = self.path(name) + else: + # OK, the file save worked. Break out of the loop. + break + + if self.file_permissions_mode is not None: + os.chmod(full_path, self.file_permissions_mode) + + # Ensure the saved path is always relative to the storage root. + name = os.path.relpath(full_path, self.location) + # Ensure the moved file has the same gid as the storage root. + self._ensure_location_group_id(full_path) + # Store filenames with forward slashes, even on Windows. 
+ return str(name).replace("\\", "/") + + def _ensure_location_group_id(self, full_path): + if os.name == "posix": + file_gid = os.stat(full_path).st_gid + location_gid = os.stat(self.location).st_gid + if file_gid != location_gid: + try: + os.chown(full_path, uid=-1, gid=location_gid) + except PermissionError: + pass + + def delete(self, name): + if not name: + raise ValueError("The name must be given to delete().") + name = self.path(name) + # If the file or directory exists, delete it from the filesystem. + try: + if os.path.isdir(name): + os.rmdir(name) + else: + os.remove(name) + except FileNotFoundError: + # FileNotFoundError is raised if the file or directory was removed + # concurrently. + pass + + def exists(self, name): + return os.path.lexists(self.path(name)) + + def listdir(self, path): + path = self.path(path) + directories, files = [], [] + with os.scandir(path) as entries: + for entry in entries: + if entry.is_dir(): + directories.append(entry.name) + else: + files.append(entry.name) + return directories, files + + def path(self, name): + return safe_join(self.location, name) + + def size(self, name): + return os.path.getsize(self.path(name)) + + def url(self, name): + if self.base_url is None: + raise ValueError("This file is not accessible via a URL.") + url = filepath_to_uri(name) + if url is not None: + url = url.lstrip("/") + return urljoin(self.base_url, url) + + def _datetime_from_timestamp(self, ts): + """ + If timezone support is enabled, make an aware datetime object in UTC; + otherwise make a naive one in the local timezone. 
+ """ + tz = timezone.utc if settings.USE_TZ else None + return datetime.fromtimestamp(ts, tz=tz) + + def get_accessed_time(self, name): + return self._datetime_from_timestamp(os.path.getatime(self.path(name))) + + def get_created_time(self, name): + return self._datetime_from_timestamp(os.path.getctime(self.path(name))) + + def get_modified_time(self, name): + return self._datetime_from_timestamp(os.path.getmtime(self.path(name))) diff --git a/testbed/django__django/django/core/files/storage/handler.py b/testbed/django__django/django/core/files/storage/handler.py new file mode 100644 index 0000000000000000000000000000000000000000..ad2054c8374fac303988b611db2fc93ba272200f --- /dev/null +++ b/testbed/django__django/django/core/files/storage/handler.py @@ -0,0 +1,55 @@ +from django.conf import DEFAULT_STORAGE_ALIAS, STATICFILES_STORAGE_ALIAS, settings +from django.core.exceptions import ImproperlyConfigured +from django.utils.functional import cached_property +from django.utils.module_loading import import_string + + +class InvalidStorageError(ImproperlyConfigured): + pass + + +class StorageHandler: + def __init__(self, backends=None): + # backends is an optional dict of storage backend definitions + # (structured like settings.STORAGES). + self._backends = backends + self._storages = {} + + @cached_property + def backends(self): + if self._backends is None: + self._backends = settings.STORAGES.copy() + # RemovedInDjango51Warning. 
+ if settings.is_overridden("DEFAULT_FILE_STORAGE"): + self._backends[DEFAULT_STORAGE_ALIAS] = { + "BACKEND": settings.DEFAULT_FILE_STORAGE + } + if settings.is_overridden("STATICFILES_STORAGE"): + self._backends[STATICFILES_STORAGE_ALIAS] = { + "BACKEND": settings.STATICFILES_STORAGE + } + return self._backends + + def __getitem__(self, alias): + try: + return self._storages[alias] + except KeyError: + try: + params = self.backends[alias] + except KeyError: + raise InvalidStorageError( + f"Could not find config for '{alias}' in settings.STORAGES." + ) + storage = self.create_storage(params) + self._storages[alias] = storage + return storage + + def create_storage(self, params): + params = params.copy() + backend = params.pop("BACKEND") + options = params.pop("OPTIONS", {}) + try: + storage_cls = import_string(backend) + except ImportError as e: + raise InvalidStorageError(f"Could not find backend {backend!r}: {e}") from e + return storage_cls(**options) diff --git a/testbed/django__django/django/core/files/storage/memory.py b/testbed/django__django/django/core/files/storage/memory.py new file mode 100644 index 0000000000000000000000000000000000000000..39a8aef3aea8d4088814a37e461c5a311aa11924 --- /dev/null +++ b/testbed/django__django/django/core/files/storage/memory.py @@ -0,0 +1,290 @@ +""" +Based on dj-inmemorystorage (BSD) by Cody Soyland, Seán Hayes, Tore Birkeland, +and Nick Presta. 
+""" + +import errno +import io +import os +import pathlib +from urllib.parse import urljoin + +from django.conf import settings +from django.core.files.base import ContentFile +from django.core.signals import setting_changed +from django.utils._os import safe_join +from django.utils.deconstruct import deconstructible +from django.utils.encoding import filepath_to_uri +from django.utils.functional import cached_property +from django.utils.timezone import now + +from .base import Storage +from .mixins import StorageSettingsMixin + +__all__ = ("InMemoryStorage",) + + +class TimingMixin: + def _initialize_times(self): + self.created_time = now() + self.accessed_time = self.created_time + self.modified_time = self.created_time + + def _update_accessed_time(self): + self.accessed_time = now() + + def _update_modified_time(self): + self.modified_time = now() + + +class InMemoryFileNode(ContentFile, TimingMixin): + """ + Helper class representing an in-memory file node. + + Handle unicode/bytes conversion during I/O operations and record creation, + modification, and access times. + """ + + def __init__(self, content="", name=""): + self.file = None + self._content_type = type(content) + self._initialize_stream() + self._initialize_times() + + def open(self, mode): + self._convert_stream_content(mode) + self._update_accessed_time() + return super().open(mode) + + def write(self, data): + super().write(data) + self._update_modified_time() + + def _initialize_stream(self): + """Initialize underlying stream according to the content type.""" + self.file = io.BytesIO() if self._content_type == bytes else io.StringIO() + + def _convert_stream_content(self, mode): + """Convert actual file content according to the opening mode.""" + new_content_type = bytes if "b" in mode else str + # No conversion needed. 
+ if self._content_type == new_content_type: + return + + content = self.file.getvalue() + content = content.encode() if isinstance(content, str) else content.decode() + self._content_type = new_content_type + self._initialize_stream() + + self.file.write(content) + + +class InMemoryDirNode(TimingMixin): + """ + Helper class representing an in-memory directory node. + + Handle path navigation of directory trees, creating missing nodes if + needed. + """ + + def __init__(self): + self._children = {} + self._initialize_times() + + def resolve(self, path, create_if_missing=False, leaf_cls=None, check_exists=True): + """ + Navigate current directory tree, returning node matching path or + creating a new one, if missing. + - path: path of the node to search + - create_if_missing: create nodes if not exist. Defaults to False. + - leaf_cls: expected type of leaf node. Defaults to None. + - check_exists: if True and the leaf node does not exist, raise a + FileNotFoundError. Defaults to True. + """ + path_segments = list(pathlib.Path(path).parts) + current_node = self + + while path_segments: + path_segment = path_segments.pop(0) + # If current node is a file node and there are unprocessed + # segments, raise an error. + if isinstance(current_node, InMemoryFileNode): + path_segments = os.path.split(path) + current_path = "/".join( + path_segments[: path_segments.index(path_segment)] + ) + raise NotADirectoryError( + errno.ENOTDIR, os.strerror(errno.ENOTDIR), current_path + ) + current_node = current_node._resolve_child( + path_segment, + create_if_missing, + leaf_cls if len(path_segments) == 0 else InMemoryDirNode, + ) + if current_node is None: + break + + if current_node is None and check_exists: + raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), path) + + # If a leaf_cls is not None, check if leaf node is of right type. 
+ if leaf_cls and not isinstance(current_node, leaf_cls): + error_cls, error_code = ( + (NotADirectoryError, errno.ENOTDIR) + if leaf_cls is InMemoryDirNode + else (IsADirectoryError, errno.EISDIR) + ) + raise error_cls(error_code, os.strerror(error_code), path) + + return current_node + + def _resolve_child(self, path_segment, create_if_missing, child_cls): + if create_if_missing: + self._update_accessed_time() + self._update_modified_time() + return self._children.setdefault(path_segment, child_cls()) + return self._children.get(path_segment) + + def listdir(self): + directories, files = [], [] + for name, entry in self._children.items(): + if isinstance(entry, InMemoryDirNode): + directories.append(name) + else: + files.append(name) + return directories, files + + def remove_child(self, name): + if name in self._children: + self._update_accessed_time() + self._update_modified_time() + del self._children[name] + + +@deconstructible(path="django.core.files.storage.InMemoryStorage") +class InMemoryStorage(Storage, StorageSettingsMixin): + """A storage saving files in memory.""" + + def __init__( + self, + location=None, + base_url=None, + file_permissions_mode=None, + directory_permissions_mode=None, + ): + self._location = location + self._base_url = base_url + self._file_permissions_mode = file_permissions_mode + self._directory_permissions_mode = directory_permissions_mode + self._root = InMemoryDirNode() + self._resolve( + self.base_location, create_if_missing=True, leaf_cls=InMemoryDirNode + ) + setting_changed.connect(self._clear_cached_properties) + + @cached_property + def base_location(self): + return self._value_or_setting(self._location, settings.MEDIA_ROOT) + + @cached_property + def location(self): + return os.path.abspath(self.base_location) + + @cached_property + def base_url(self): + if self._base_url is not None and not self._base_url.endswith("/"): + self._base_url += "/" + return self._value_or_setting(self._base_url, settings.MEDIA_URL) + + 
@cached_property + def file_permissions_mode(self): + return self._value_or_setting( + self._file_permissions_mode, settings.FILE_UPLOAD_PERMISSIONS + ) + + @cached_property + def directory_permissions_mode(self): + return self._value_or_setting( + self._directory_permissions_mode, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS + ) + + def _relative_path(self, name): + full_path = self.path(name) + return os.path.relpath(full_path, self.location) + + def _resolve(self, name, create_if_missing=False, leaf_cls=None, check_exists=True): + try: + relative_path = self._relative_path(name) + return self._root.resolve( + relative_path, + create_if_missing=create_if_missing, + leaf_cls=leaf_cls, + check_exists=check_exists, + ) + except NotADirectoryError as exc: + absolute_path = self.path(exc.filename) + raise FileExistsError(f"{absolute_path} exists and is not a directory.") + + def _open(self, name, mode="rb"): + create_if_missing = "w" in mode + file_node = self._resolve( + name, create_if_missing=create_if_missing, leaf_cls=InMemoryFileNode + ) + return file_node.open(mode) + + def _save(self, name, content): + file_node = self._resolve( + name, create_if_missing=True, leaf_cls=InMemoryFileNode + ) + fd = None + for chunk in content.chunks(): + if fd is None: + mode = "wb" if isinstance(chunk, bytes) else "wt" + fd = file_node.open(mode) + fd.write(chunk) + + if hasattr(content, "temporary_file_path"): + os.remove(content.temporary_file_path()) + + file_node.modified_time = now() + return self._relative_path(name).replace("\\", "/") + + def path(self, name): + return safe_join(self.location, name) + + def delete(self, name): + path, filename = os.path.split(name) + dir_node = self._resolve(path, check_exists=False) + if dir_node is None: + return None + dir_node.remove_child(filename) + + def exists(self, name): + return self._resolve(name, check_exists=False) is not None + + def listdir(self, path): + node = self._resolve(path, leaf_cls=InMemoryDirNode) + return 
node.listdir() + + def size(self, name): + return len(self._open(name, "rb").file.getvalue()) + + def url(self, name): + if self.base_url is None: + raise ValueError("This file is not accessible via a URL.") + url = filepath_to_uri(name) + if url is not None: + url = url.lstrip("/") + return urljoin(self.base_url, url) + + def get_accessed_time(self, name): + file_node = self._resolve(name) + return file_node.accessed_time + + def get_created_time(self, name): + file_node = self._resolve(name) + return file_node.created_time + + def get_modified_time(self, name): + file_node = self._resolve(name) + return file_node.modified_time diff --git a/testbed/django__django/django/core/files/storage/mixins.py b/testbed/django__django/django/core/files/storage/mixins.py new file mode 100644 index 0000000000000000000000000000000000000000..663a163beae1f8780a88003792849fbd7b201fd8 --- /dev/null +++ b/testbed/django__django/django/core/files/storage/mixins.py @@ -0,0 +1,15 @@ +class StorageSettingsMixin: + def _clear_cached_properties(self, setting, **kwargs): + """Reset setting based property values.""" + if setting == "MEDIA_ROOT": + self.__dict__.pop("base_location", None) + self.__dict__.pop("location", None) + elif setting == "MEDIA_URL": + self.__dict__.pop("base_url", None) + elif setting == "FILE_UPLOAD_PERMISSIONS": + self.__dict__.pop("file_permissions_mode", None) + elif setting == "FILE_UPLOAD_DIRECTORY_PERMISSIONS": + self.__dict__.pop("directory_permissions_mode", None) + + def _value_or_setting(self, value, setting): + return setting if value is None else value diff --git a/testbed/django__django/django/core/files/uploadedfile.py b/testbed/django__django/django/core/files/uploadedfile.py new file mode 100644 index 0000000000000000000000000000000000000000..efbfcac4c872cf1e928f3eb4721da803af335482 --- /dev/null +++ b/testbed/django__django/django/core/files/uploadedfile.py @@ -0,0 +1,150 @@ +""" +Classes representing uploaded files. 
+""" + +import os +from io import BytesIO + +from django.conf import settings +from django.core.files import temp as tempfile +from django.core.files.base import File +from django.core.files.utils import validate_file_name + +__all__ = ( + "UploadedFile", + "TemporaryUploadedFile", + "InMemoryUploadedFile", + "SimpleUploadedFile", +) + + +class UploadedFile(File): + """ + An abstract uploaded file (``TemporaryUploadedFile`` and + ``InMemoryUploadedFile`` are the built-in concrete subclasses). + + An ``UploadedFile`` object behaves somewhat like a file object and + represents some file data that the user submitted with a form. + """ + + def __init__( + self, + file=None, + name=None, + content_type=None, + size=None, + charset=None, + content_type_extra=None, + ): + super().__init__(file, name) + self.size = size + self.content_type = content_type + self.charset = charset + self.content_type_extra = content_type_extra + + def __repr__(self): + return "<%s: %s (%s)>" % (self.__class__.__name__, self.name, self.content_type) + + def _get_name(self): + return self._name + + def _set_name(self, name): + # Sanitize the file name so that it can't be dangerous. + if name is not None: + # Just use the basename of the file -- anything else is dangerous. + name = os.path.basename(name) + + # File names longer than 255 characters can cause problems on older OSes. + if len(name) > 255: + name, ext = os.path.splitext(name) + ext = ext[:255] + name = name[: 255 - len(ext)] + ext + + name = validate_file_name(name) + + self._name = name + + name = property(_get_name, _set_name) + + +class TemporaryUploadedFile(UploadedFile): + """ + A file uploaded to a temporary location (i.e. stream-to-disk). 
+ """ + + def __init__(self, name, content_type, size, charset, content_type_extra=None): + _, ext = os.path.splitext(name) + file = tempfile.NamedTemporaryFile( + suffix=".upload" + ext, dir=settings.FILE_UPLOAD_TEMP_DIR + ) + super().__init__(file, name, content_type, size, charset, content_type_extra) + + def temporary_file_path(self): + """Return the full path of this file.""" + return self.file.name + + def close(self): + try: + return self.file.close() + except FileNotFoundError: + # The file was moved or deleted before the tempfile could unlink + # it. Still sets self.file.close_called and calls + # self.file.file.close() before the exception. + pass + + +class InMemoryUploadedFile(UploadedFile): + """ + A file uploaded into memory (i.e. stream-to-memory). + """ + + def __init__( + self, + file, + field_name, + name, + content_type, + size, + charset, + content_type_extra=None, + ): + super().__init__(file, name, content_type, size, charset, content_type_extra) + self.field_name = field_name + + def open(self, mode=None): + self.file.seek(0) + return self + + def chunks(self, chunk_size=None): + self.file.seek(0) + yield self.read() + + def multiple_chunks(self, chunk_size=None): + # Since it's in memory, we'll never have multiple chunks. + return False + + +class SimpleUploadedFile(InMemoryUploadedFile): + """ + A simple representation of a file, which just has content, size, and a name. 
+ """ + + def __init__(self, name, content, content_type="text/plain"): + content = content or b"" + super().__init__( + BytesIO(content), None, name, content_type, len(content), None, None + ) + + @classmethod + def from_dict(cls, file_dict): + """ + Create a SimpleUploadedFile object from a dictionary with keys: + - filename + - content-type + - content + """ + return cls( + file_dict["filename"], + file_dict["content"], + file_dict.get("content-type", "text/plain"), + ) diff --git a/testbed/django__django/django/core/files/uploadhandler.py b/testbed/django__django/django/core/files/uploadhandler.py new file mode 100644 index 0000000000000000000000000000000000000000..b6c185e8fc720e9948277cf67699f3dbb8acf2e2 --- /dev/null +++ b/testbed/django__django/django/core/files/uploadhandler.py @@ -0,0 +1,251 @@ +""" +Base file upload handler classes, and the built-in concrete subclasses +""" +import os +from io import BytesIO + +from django.conf import settings +from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile +from django.utils.module_loading import import_string + +__all__ = [ + "UploadFileException", + "StopUpload", + "SkipFile", + "FileUploadHandler", + "TemporaryFileUploadHandler", + "MemoryFileUploadHandler", + "load_handler", + "StopFutureHandlers", +] + + +class UploadFileException(Exception): + """ + Any error having to do with uploading files. + """ + + pass + + +class StopUpload(UploadFileException): + """ + This exception is raised when an upload must abort. + """ + + def __init__(self, connection_reset=False): + """ + If ``connection_reset`` is ``True``, Django knows will halt the upload + without consuming the rest of the upload. This will cause the browser to + show a "connection reset" error. + """ + self.connection_reset = connection_reset + + def __str__(self): + if self.connection_reset: + return "StopUpload: Halt current upload." + else: + return "StopUpload: Consume request data, then halt." 
# django/core/files/uploadhandler.py (continued): handler-control exceptions
# and the built-in streaming upload handlers, restored from the diff-fused
# dump to conventional formatting.


class SkipFile(UploadFileException):
    """
    This exception is raised by an upload handler that wants to skip a given file.
    """

    pass


class StopFutureHandlers(UploadFileException):
    """
    Upload handlers that have handled a file and do not want future handlers to
    run should raise this exception instead of returning None.
    """

    pass


class FileUploadHandler:
    """
    Base class for streaming upload handlers.
    """

    chunk_size = 64 * 2**10  # The default chunk size is 64 KB.

    def __init__(self, request=None):
        self.file_name = None
        self.content_type = None
        self.content_length = None
        self.charset = None
        self.content_type_extra = None
        self.request = request

    def handle_raw_input(
        self, input_data, META, content_length, boundary, encoding=None
    ):
        """
        Handle the raw input from the client.

        Parameters:

        :input_data:
            An object that supports reading via .read().
        :META:
            ``request.META``.
        :content_length:
            The (integer) value of the Content-Length header from the
            client.
        :boundary: The boundary from the Content-Type header. Be sure to
            prepend two '--'.
        """
        pass

    def new_file(
        self,
        field_name,
        file_name,
        content_type,
        content_length,
        charset=None,
        content_type_extra=None,
    ):
        """
        Signal that a new file has been started.

        Warning: As with any data from the client, you should not trust
        content_length (and sometimes won't even get it).
        """
        self.field_name = field_name
        self.file_name = file_name
        self.content_type = content_type
        self.content_length = content_length
        self.charset = charset
        self.content_type_extra = content_type_extra

    def receive_data_chunk(self, raw_data, start):
        """
        Receive data from the streamed upload parser. ``start`` is the position
        in the file of the chunk.
        """
        raise NotImplementedError(
            "subclasses of FileUploadHandler must provide a receive_data_chunk() method"
        )

    def file_complete(self, file_size):
        """
        Signal that a file has completed. File size corresponds to the actual
        size accumulated by all the chunks.

        Subclasses should return a valid ``UploadedFile`` object.
        """
        raise NotImplementedError(
            "subclasses of FileUploadHandler must provide a file_complete() method"
        )

    def upload_complete(self):
        """
        Signal that the upload is complete. Subclasses should perform cleanup
        that is necessary for this handler.
        """
        pass

    def upload_interrupted(self):
        """
        Signal that the upload was interrupted. Subclasses should perform
        cleanup that is necessary for this handler.
        """
        pass


class TemporaryFileUploadHandler(FileUploadHandler):
    """
    Upload handler that streams data into a temporary file.
    """

    def new_file(self, *args, **kwargs):
        """
        Create the file object to append to as data is coming in.
        """
        super().new_file(*args, **kwargs)
        self.file = TemporaryUploadedFile(
            self.file_name, self.content_type, 0, self.charset, self.content_type_extra
        )

    def receive_data_chunk(self, raw_data, start):
        self.file.write(raw_data)

    def file_complete(self, file_size):
        self.file.seek(0)
        self.file.size = file_size
        return self.file

    def upload_interrupted(self):
        # Remove the partially-written temporary file; it is useless once the
        # upload has been aborted. The file may already be gone (moved or
        # unlinked), hence the FileNotFoundError guard.
        if hasattr(self, "file"):
            temp_location = self.file.temporary_file_path()
            try:
                self.file.close()
                os.remove(temp_location)
            except FileNotFoundError:
                pass


class MemoryFileUploadHandler(FileUploadHandler):
    """
    File upload handler to stream uploads into memory (used for small files).
    """

    def handle_raw_input(
        self, input_data, META, content_length, boundary, encoding=None
    ):
        """
        Use the content_length to signal whether or not this handler should be
        used.
        """
        # Check the content-length header to see if this handler should be
        # used. If the post is too large, we cannot use the memory handler.
        self.activated = content_length <= settings.FILE_UPLOAD_MAX_MEMORY_SIZE

    def new_file(self, *args, **kwargs):
        super().new_file(*args, **kwargs)
        if self.activated:
            self.file = BytesIO()
            # This handler owns the file now; stop later handlers from also
            # receiving it.
            raise StopFutureHandlers()

    def receive_data_chunk(self, raw_data, start):
        """Add the data to the BytesIO file."""
        if self.activated:
            self.file.write(raw_data)
        else:
            # Not handling this upload; pass the chunk on to the next handler.
            return raw_data

    def file_complete(self, file_size):
        """Return a file object if this handler is activated."""
        if not self.activated:
            return

        self.file.seek(0)
        return InMemoryUploadedFile(
            file=self.file,
            field_name=self.field_name,
            name=self.file_name,
            content_type=self.content_type,
            size=file_size,
            charset=self.charset,
            content_type_extra=self.content_type_extra,
        )


def load_handler(path, *args, **kwargs):
    """
    Given a path to a handler, return an instance of that handler.

    E.g.::
        >>> from django.http import HttpRequest
        >>> request = HttpRequest()
        >>> load_handler(
        ...     'django.core.files.uploadhandler.TemporaryFileUploadHandler',
        ...     request,
        ... )
    """
    return import_string(path)(*args, **kwargs)
) + + """ + return import_string(path)(*args, **kwargs) diff --git a/testbed/django__django/django/core/handlers/__init__.py b/testbed/django__django/django/core/handlers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/testbed/django__django/django/core/handlers/asgi.py b/testbed/django__django/django/core/handlers/asgi.py new file mode 100644 index 0000000000000000000000000000000000000000..0edc98854f07ad06b1d39402a60ec55641714200 --- /dev/null +++ b/testbed/django__django/django/core/handlers/asgi.py @@ -0,0 +1,355 @@ +import asyncio +import logging +import sys +import tempfile +import traceback +from contextlib import aclosing + +from asgiref.sync import ThreadSensitiveContext, sync_to_async + +from django.conf import settings +from django.core import signals +from django.core.exceptions import RequestAborted, RequestDataTooBig +from django.core.handlers import base +from django.http import ( + FileResponse, + HttpRequest, + HttpResponse, + HttpResponseBadRequest, + HttpResponseServerError, + QueryDict, + parse_cookie, +) +from django.urls import set_script_prefix +from django.utils.functional import cached_property + +logger = logging.getLogger("django.request") + + +def get_script_prefix(scope): + """ + Return the script prefix to use from either the scope or a setting. + """ + if settings.FORCE_SCRIPT_NAME: + return settings.FORCE_SCRIPT_NAME + return scope.get("root_path", "") or "" + + +class ASGIRequest(HttpRequest): + """ + Custom request subclass that decodes from an ASGI-standard request dict + and wraps request body handling. + """ + + # Number of seconds until a Request gives up on trying to read a request + # body and aborts. 
+ body_receive_timeout = 60 + + def __init__(self, scope, body_file): + self.scope = scope + self._post_parse_error = False + self._read_started = False + self.resolver_match = None + self.script_name = get_script_prefix(scope) + if self.script_name: + # TODO: Better is-prefix checking, slash handling? + self.path_info = scope["path"].removeprefix(self.script_name) + else: + self.path_info = scope["path"] + # The Django path is different from ASGI scope path args, it should + # combine with script name. + if self.script_name: + self.path = "%s/%s" % ( + self.script_name.rstrip("/"), + self.path_info.replace("/", "", 1), + ) + else: + self.path = scope["path"] + # HTTP basics. + self.method = self.scope["method"].upper() + # Ensure query string is encoded correctly. + query_string = self.scope.get("query_string", "") + if isinstance(query_string, bytes): + query_string = query_string.decode() + self.META = { + "REQUEST_METHOD": self.method, + "QUERY_STRING": query_string, + "SCRIPT_NAME": self.script_name, + "PATH_INFO": self.path_info, + # WSGI-expecting code will need these for a while + "wsgi.multithread": True, + "wsgi.multiprocess": True, + } + if self.scope.get("client"): + self.META["REMOTE_ADDR"] = self.scope["client"][0] + self.META["REMOTE_HOST"] = self.META["REMOTE_ADDR"] + self.META["REMOTE_PORT"] = self.scope["client"][1] + if self.scope.get("server"): + self.META["SERVER_NAME"] = self.scope["server"][0] + self.META["SERVER_PORT"] = str(self.scope["server"][1]) + else: + self.META["SERVER_NAME"] = "unknown" + self.META["SERVER_PORT"] = "0" + # Headers go into META. + for name, value in self.scope.get("headers", []): + name = name.decode("latin1") + if name == "content-length": + corrected_name = "CONTENT_LENGTH" + elif name == "content-type": + corrected_name = "CONTENT_TYPE" + else: + corrected_name = "HTTP_%s" % name.upper().replace("-", "_") + # HTTP/2 say only ASCII chars are allowed in headers, but decode + # latin1 just in case. 
+ value = value.decode("latin1") + if corrected_name in self.META: + value = self.META[corrected_name] + "," + value + self.META[corrected_name] = value + # Pull out request encoding, if provided. + self._set_content_type_params(self.META) + # Directly assign the body file to be our stream. + self._stream = body_file + # Other bits. + self.resolver_match = None + + @cached_property + def GET(self): + return QueryDict(self.META["QUERY_STRING"]) + + def _get_scheme(self): + return self.scope.get("scheme") or super()._get_scheme() + + def _get_post(self): + if not hasattr(self, "_post"): + self._load_post_and_files() + return self._post + + def _set_post(self, post): + self._post = post + + def _get_files(self): + if not hasattr(self, "_files"): + self._load_post_and_files() + return self._files + + POST = property(_get_post, _set_post) + FILES = property(_get_files) + + @cached_property + def COOKIES(self): + return parse_cookie(self.META.get("HTTP_COOKIE", "")) + + def close(self): + super().close() + self._stream.close() + + +class ASGIHandler(base.BaseHandler): + """Handler for ASGI requests.""" + + request_class = ASGIRequest + # Size to chunk response bodies into for multiple response messages. + chunk_size = 2**16 + + def __init__(self): + super().__init__() + self.load_middleware(is_async=True) + + async def __call__(self, scope, receive, send): + """ + Async entrypoint - parses the request and hands off to get_response. + """ + # Serve only HTTP connections. + # FIXME: Allow to override this. + if scope["type"] != "http": + raise ValueError( + "Django can only handle ASGI/HTTP connections, not %s." % scope["type"] + ) + + async with ThreadSensitiveContext(): + await self.handle(scope, receive, send) + + async def handle(self, scope, receive, send): + """ + Handles the ASGI request. Called via the __call__ method. + """ + # Receive the HTTP request body as a stream object. 
+ try: + body_file = await self.read_body(receive) + except RequestAborted: + return + # Request is complete and can be served. + set_script_prefix(get_script_prefix(scope)) + await signals.request_started.asend(sender=self.__class__, scope=scope) + # Get the request and check for basic issues. + request, error_response = self.create_request(scope, body_file) + if request is None: + body_file.close() + await self.send_response(error_response, send) + return + # Try to catch a disconnect while getting response. + tasks = [ + asyncio.create_task(self.run_get_response(request)), + asyncio.create_task(self.listen_for_disconnect(receive)), + ] + done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) + done, pending = done.pop(), pending.pop() + # Allow views to handle cancellation. + pending.cancel() + try: + await pending + except asyncio.CancelledError: + # Task re-raised the CancelledError as expected. + pass + try: + response = done.result() + except RequestAborted: + body_file.close() + return + except AssertionError: + body_file.close() + raise + # Send the response. + await self.send_response(response, send) + + async def listen_for_disconnect(self, receive): + """Listen for disconnect from the client.""" + message = await receive() + if message["type"] == "http.disconnect": + raise RequestAborted() + # This should never happen. + assert False, "Invalid ASGI message after request body: %s" % message["type"] + + async def run_get_response(self, request): + """Get async response.""" + # Use the async mode of BaseHandler. + response = await self.get_response_async(request) + response._handler_class = self.__class__ + # Increase chunk size on file responses (ASGI servers handles low-level + # chunking). 
+ if isinstance(response, FileResponse): + response.block_size = self.chunk_size + return response + + async def read_body(self, receive): + """Reads an HTTP body from an ASGI connection.""" + # Use the tempfile that auto rolls-over to a disk file as it fills up. + body_file = tempfile.SpooledTemporaryFile( + max_size=settings.FILE_UPLOAD_MAX_MEMORY_SIZE, mode="w+b" + ) + while True: + message = await receive() + if message["type"] == "http.disconnect": + body_file.close() + # Early client disconnect. + raise RequestAborted() + # Add a body chunk from the message, if provided. + if "body" in message: + body_file.write(message["body"]) + # Quit out if that's the end. + if not message.get("more_body", False): + break + body_file.seek(0) + return body_file + + def create_request(self, scope, body_file): + """ + Create the Request object and returns either (request, None) or + (None, response) if there is an error response. + """ + try: + return self.request_class(scope, body_file), None + except UnicodeDecodeError: + logger.warning( + "Bad Request (UnicodeDecodeError)", + exc_info=sys.exc_info(), + extra={"status_code": 400}, + ) + return None, HttpResponseBadRequest() + except RequestDataTooBig: + return None, HttpResponse("413 Payload too large", status=413) + + def handle_uncaught_exception(self, request, resolver, exc_info): + """Last-chance handler for exceptions.""" + # There's no WSGI server to catch the exception further up + # if this fails, so translate it into a plain text response. + try: + return super().handle_uncaught_exception(request, resolver, exc_info) + except Exception: + return HttpResponseServerError( + traceback.format_exc() if settings.DEBUG else "Internal Server Error", + content_type="text/plain", + ) + + async def send_response(self, response, send): + """Encode and send a response out over ASGI.""" + # Collect cookies into headers. Have to preserve header case as there + # are some non-RFC compliant clients that require e.g. Content-Type. 
+ response_headers = [] + for header, value in response.items(): + if isinstance(header, str): + header = header.encode("ascii") + if isinstance(value, str): + value = value.encode("latin1") + response_headers.append((bytes(header), bytes(value))) + for c in response.cookies.values(): + response_headers.append( + (b"Set-Cookie", c.output(header="").encode("ascii").strip()) + ) + # Initial response message. + await send( + { + "type": "http.response.start", + "status": response.status_code, + "headers": response_headers, + } + ) + # Streaming responses need to be pinned to their iterator. + if response.streaming: + # - Consume via `__aiter__` and not `streaming_content` directly, to + # allow mapping of a sync iterator. + # - Use aclosing() when consuming aiter. + # See https://github.com/python/cpython/commit/6e8dcda + async with aclosing(aiter(response)) as content: + async for part in content: + for chunk, _ in self.chunk_bytes(part): + await send( + { + "type": "http.response.body", + "body": chunk, + # Ignore "more" as there may be more parts; instead, + # use an empty final closing message with False. + "more_body": True, + } + ) + # Final closing message. + await send({"type": "http.response.body"}) + # Other responses just need chunking. + else: + # Yield chunks of response. + for chunk, last in self.chunk_bytes(response.content): + await send( + { + "type": "http.response.body", + "body": chunk, + "more_body": not last, + } + ) + await sync_to_async(response.close, thread_sensitive=True)() + + @classmethod + def chunk_bytes(cls, data): + """ + Chunks some data up so it can be sent in reasonable size messages. + Yields (chunk, last_chunk) tuples. 
+ """ + position = 0 + if not data: + yield data, True + return + while position < len(data): + yield ( + data[position : position + cls.chunk_size], + (position + cls.chunk_size) >= len(data), + ) + position += cls.chunk_size diff --git a/testbed/django__django/django/core/handlers/base.py b/testbed/django__django/django/core/handlers/base.py new file mode 100644 index 0000000000000000000000000000000000000000..8911543d4e9ee7a3711242d6847af984a32ba86b --- /dev/null +++ b/testbed/django__django/django/core/handlers/base.py @@ -0,0 +1,373 @@ +import asyncio +import logging +import types + +from asgiref.sync import async_to_sync, iscoroutinefunction, sync_to_async + +from django.conf import settings +from django.core.exceptions import ImproperlyConfigured, MiddlewareNotUsed +from django.core.signals import request_finished +from django.db import connections, transaction +from django.urls import get_resolver, set_urlconf +from django.utils.log import log_response +from django.utils.module_loading import import_string + +from .exception import convert_exception_to_response + +logger = logging.getLogger("django.request") + + +class BaseHandler: + _view_middleware = None + _template_response_middleware = None + _exception_middleware = None + _middleware_chain = None + + def load_middleware(self, is_async=False): + """ + Populate middleware lists from settings.MIDDLEWARE. + + Must be called after the environment is fixed (see __call__ in subclasses). 
+ """ + self._view_middleware = [] + self._template_response_middleware = [] + self._exception_middleware = [] + + get_response = self._get_response_async if is_async else self._get_response + handler = convert_exception_to_response(get_response) + handler_is_async = is_async + for middleware_path in reversed(settings.MIDDLEWARE): + middleware = import_string(middleware_path) + middleware_can_sync = getattr(middleware, "sync_capable", True) + middleware_can_async = getattr(middleware, "async_capable", False) + if not middleware_can_sync and not middleware_can_async: + raise RuntimeError( + "Middleware %s must have at least one of " + "sync_capable/async_capable set to True." % middleware_path + ) + elif not handler_is_async and middleware_can_sync: + middleware_is_async = False + else: + middleware_is_async = middleware_can_async + try: + # Adapt handler, if needed. + adapted_handler = self.adapt_method_mode( + middleware_is_async, + handler, + handler_is_async, + debug=settings.DEBUG, + name="middleware %s" % middleware_path, + ) + mw_instance = middleware(adapted_handler) + except MiddlewareNotUsed as exc: + if settings.DEBUG: + if str(exc): + logger.debug("MiddlewareNotUsed(%r): %s", middleware_path, exc) + else: + logger.debug("MiddlewareNotUsed: %r", middleware_path) + continue + else: + handler = adapted_handler + + if mw_instance is None: + raise ImproperlyConfigured( + "Middleware factory %s returned None." % middleware_path + ) + + if hasattr(mw_instance, "process_view"): + self._view_middleware.insert( + 0, + self.adapt_method_mode(is_async, mw_instance.process_view), + ) + if hasattr(mw_instance, "process_template_response"): + self._template_response_middleware.append( + self.adapt_method_mode( + is_async, mw_instance.process_template_response + ), + ) + if hasattr(mw_instance, "process_exception"): + # The exception-handling stack is still always synchronous for + # now, so adapt that way. 
+ self._exception_middleware.append( + self.adapt_method_mode(False, mw_instance.process_exception), + ) + + handler = convert_exception_to_response(mw_instance) + handler_is_async = middleware_is_async + + # Adapt the top of the stack, if needed. + handler = self.adapt_method_mode(is_async, handler, handler_is_async) + # We only assign to this when initialization is complete as it is used + # as a flag for initialization being complete. + self._middleware_chain = handler + + def adapt_method_mode( + self, + is_async, + method, + method_is_async=None, + debug=False, + name=None, + ): + """ + Adapt a method to be in the correct "mode": + - If is_async is False: + - Synchronous methods are left alone + - Asynchronous methods are wrapped with async_to_sync + - If is_async is True: + - Synchronous methods are wrapped with sync_to_async() + - Asynchronous methods are left alone + """ + if method_is_async is None: + method_is_async = iscoroutinefunction(method) + if debug and not name: + name = name or "method %s()" % method.__qualname__ + if is_async: + if not method_is_async: + if debug: + logger.debug("Synchronous handler adapted for %s.", name) + return sync_to_async(method, thread_sensitive=True) + elif method_is_async: + if debug: + logger.debug("Asynchronous handler adapted for %s.", name) + return async_to_sync(method) + return method + + def get_response(self, request): + """Return an HttpResponse object for the given HttpRequest.""" + # Setup default url resolver for this thread + set_urlconf(settings.ROOT_URLCONF) + response = self._middleware_chain(request) + response._resource_closers.append(request.close) + if response.status_code >= 400: + log_response( + "%s: %s", + response.reason_phrase, + request.path, + response=response, + request=request, + ) + return response + + async def get_response_async(self, request): + """ + Asynchronous version of get_response. + + Funneling everything, including WSGI, into a single async + get_response() is too slow. 
Avoid the context switch by using + a separate async response path. + """ + # Setup default url resolver for this thread. + set_urlconf(settings.ROOT_URLCONF) + response = await self._middleware_chain(request) + response._resource_closers.append(request.close) + if response.status_code >= 400: + await sync_to_async(log_response, thread_sensitive=False)( + "%s: %s", + response.reason_phrase, + request.path, + response=response, + request=request, + ) + return response + + def _get_response(self, request): + """ + Resolve and call the view, then apply view, exception, and + template_response middleware. This method is everything that happens + inside the request/response middleware. + """ + response = None + callback, callback_args, callback_kwargs = self.resolve_request(request) + + # Apply view middleware + for middleware_method in self._view_middleware: + response = middleware_method( + request, callback, callback_args, callback_kwargs + ) + if response: + break + + if response is None: + wrapped_callback = self.make_view_atomic(callback) + # If it is an asynchronous view, run it in a subthread. + if iscoroutinefunction(wrapped_callback): + wrapped_callback = async_to_sync(wrapped_callback) + try: + response = wrapped_callback(request, *callback_args, **callback_kwargs) + except Exception as e: + response = self.process_exception_by_middleware(e, request) + if response is None: + raise + + # Complain if the view returned None (a common error). + self.check_response(response, callback) + + # If the response supports deferred rendering, apply template + # response middleware and then render the response + if hasattr(response, "render") and callable(response.render): + for middleware_method in self._template_response_middleware: + response = middleware_method(request, response) + # Complain if the template response middleware returned None + # (a common error). 
+ self.check_response( + response, + middleware_method, + name="%s.process_template_response" + % (middleware_method.__self__.__class__.__name__,), + ) + try: + response = response.render() + except Exception as e: + response = self.process_exception_by_middleware(e, request) + if response is None: + raise + + return response + + async def _get_response_async(self, request): + """ + Resolve and call the view, then apply view, exception, and + template_response middleware. This method is everything that happens + inside the request/response middleware. + """ + response = None + callback, callback_args, callback_kwargs = self.resolve_request(request) + + # Apply view middleware. + for middleware_method in self._view_middleware: + response = await middleware_method( + request, callback, callback_args, callback_kwargs + ) + if response: + break + + if response is None: + wrapped_callback = self.make_view_atomic(callback) + # If it is a synchronous view, run it in a subthread + if not iscoroutinefunction(wrapped_callback): + wrapped_callback = sync_to_async( + wrapped_callback, thread_sensitive=True + ) + try: + response = await wrapped_callback( + request, *callback_args, **callback_kwargs + ) + except Exception as e: + response = await sync_to_async( + self.process_exception_by_middleware, + thread_sensitive=True, + )(e, request) + if response is None: + raise + + # Complain if the view returned None or an uncalled coroutine. + self.check_response(response, callback) + + # If the response supports deferred rendering, apply template + # response middleware and then render the response + if hasattr(response, "render") and callable(response.render): + for middleware_method in self._template_response_middleware: + response = await middleware_method(request, response) + # Complain if the template response middleware returned None or + # an uncalled coroutine. 
+ self.check_response( + response, + middleware_method, + name="%s.process_template_response" + % (middleware_method.__self__.__class__.__name__,), + ) + try: + if iscoroutinefunction(response.render): + response = await response.render() + else: + response = await sync_to_async( + response.render, thread_sensitive=True + )() + except Exception as e: + response = await sync_to_async( + self.process_exception_by_middleware, + thread_sensitive=True, + )(e, request) + if response is None: + raise + + # Make sure the response is not a coroutine + if asyncio.iscoroutine(response): + raise RuntimeError("Response is still a coroutine.") + return response + + def resolve_request(self, request): + """ + Retrieve/set the urlconf for the request. Return the view resolved, + with its args and kwargs. + """ + # Work out the resolver. + if hasattr(request, "urlconf"): + urlconf = request.urlconf + set_urlconf(urlconf) + resolver = get_resolver(urlconf) + else: + resolver = get_resolver() + # Resolve the view, and assign the match object back to the request. + resolver_match = resolver.resolve(request.path_info) + request.resolver_match = resolver_match + return resolver_match + + def check_response(self, response, callback, name=None): + """ + Raise an error if the view returned None or an uncalled coroutine. + """ + if not (response is None or asyncio.iscoroutine(response)): + return + if not name: + if isinstance(callback, types.FunctionType): # FBV + name = "The view %s.%s" % (callback.__module__, callback.__name__) + else: # CBV + name = "The view %s.%s.__call__" % ( + callback.__module__, + callback.__class__.__name__, + ) + if response is None: + raise ValueError( + "%s didn't return an HttpResponse object. It returned None " + "instead." % name + ) + elif asyncio.iscoroutine(response): + raise ValueError( + "%s didn't return an HttpResponse object. It returned an " + "unawaited coroutine instead. You may need to add an 'await' " + "into your view." 
% name + ) + + # Other utility methods. + + def make_view_atomic(self, view): + non_atomic_requests = getattr(view, "_non_atomic_requests", set()) + for alias, settings_dict in connections.settings.items(): + if settings_dict["ATOMIC_REQUESTS"] and alias not in non_atomic_requests: + if iscoroutinefunction(view): + raise RuntimeError( + "You cannot use ATOMIC_REQUESTS with async views." + ) + view = transaction.atomic(using=alias)(view) + return view + + def process_exception_by_middleware(self, exception, request): + """ + Pass the exception to the exception middleware. If no middleware + return a response for this exception, return None. + """ + for middleware_method in self._exception_middleware: + response = middleware_method(request, exception) + if response: + return response + return None + + +def reset_urlconf(sender, **kwargs): + """Reset the URLconf after each request is finished.""" + set_urlconf(None) + + +request_finished.connect(reset_urlconf) diff --git a/testbed/django__django/django/core/handlers/exception.py b/testbed/django__django/django/core/handlers/exception.py new file mode 100644 index 0000000000000000000000000000000000000000..a63291f3b94c3a5bbeb0201f831ea90dce97fa72 --- /dev/null +++ b/testbed/django__django/django/core/handlers/exception.py @@ -0,0 +1,185 @@ +import logging +import sys +from functools import wraps + +from asgiref.sync import iscoroutinefunction, sync_to_async + +from django.conf import settings +from django.core import signals +from django.core.exceptions import ( + BadRequest, + PermissionDenied, + RequestDataTooBig, + SuspiciousOperation, + TooManyFieldsSent, + TooManyFilesSent, +) +from django.http import Http404 +from django.http.multipartparser import MultiPartParserError +from django.urls import get_resolver, get_urlconf +from django.utils.log import log_response +from django.views import debug + + +def convert_exception_to_response(get_response): + """ + Wrap the given get_response callable in exception-to-response 
conversion. + + All exceptions will be converted. All known 4xx exceptions (Http404, + PermissionDenied, MultiPartParserError, SuspiciousOperation) will be + converted to the appropriate response, and all other exceptions will be + converted to 500 responses. + + This decorator is automatically applied to all middleware to ensure that + no middleware leaks an exception and that the next middleware in the stack + can rely on getting a response instead of an exception. + """ + if iscoroutinefunction(get_response): + + @wraps(get_response) + async def inner(request): + try: + response = await get_response(request) + except Exception as exc: + response = await sync_to_async( + response_for_exception, thread_sensitive=False + )(request, exc) + return response + + return inner + else: + + @wraps(get_response) + def inner(request): + try: + response = get_response(request) + except Exception as exc: + response = response_for_exception(request, exc) + return response + + return inner + + +def response_for_exception(request, exc): + if isinstance(exc, Http404): + if settings.DEBUG: + response = debug.technical_404_response(request, exc) + else: + response = get_exception_response( + request, get_resolver(get_urlconf()), 404, exc + ) + + elif isinstance(exc, PermissionDenied): + response = get_exception_response( + request, get_resolver(get_urlconf()), 403, exc + ) + log_response( + "Forbidden (Permission denied): %s", + request.path, + response=response, + request=request, + exception=exc, + ) + + elif isinstance(exc, MultiPartParserError): + response = get_exception_response( + request, get_resolver(get_urlconf()), 400, exc + ) + log_response( + "Bad request (Unable to parse request body): %s", + request.path, + response=response, + request=request, + exception=exc, + ) + + elif isinstance(exc, BadRequest): + if settings.DEBUG: + response = debug.technical_500_response( + request, *sys.exc_info(), status_code=400 + ) + else: + response = get_exception_response( + request, 
get_resolver(get_urlconf()), 400, exc + ) + log_response( + "%s: %s", + str(exc), + request.path, + response=response, + request=request, + exception=exc, + ) + elif isinstance(exc, SuspiciousOperation): + if isinstance(exc, (RequestDataTooBig, TooManyFieldsSent, TooManyFilesSent)): + # POST data can't be accessed again, otherwise the original + # exception would be raised. + request._mark_post_parse_error() + + # The request logger receives events for any problematic request + # The security logger receives events for all SuspiciousOperations + security_logger = logging.getLogger( + "django.security.%s" % exc.__class__.__name__ + ) + security_logger.error( + str(exc), + exc_info=exc, + extra={"status_code": 400, "request": request}, + ) + if settings.DEBUG: + response = debug.technical_500_response( + request, *sys.exc_info(), status_code=400 + ) + else: + response = get_exception_response( + request, get_resolver(get_urlconf()), 400, exc + ) + + else: + signals.got_request_exception.send(sender=None, request=request) + response = handle_uncaught_exception( + request, get_resolver(get_urlconf()), sys.exc_info() + ) + log_response( + "%s: %s", + response.reason_phrase, + request.path, + response=response, + request=request, + exception=exc, + ) + + # Force a TemplateResponse to be rendered. 
+ if not getattr(response, "is_rendered", True) and callable( + getattr(response, "render", None) + ): + response = response.render() + + return response + + +def get_exception_response(request, resolver, status_code, exception): + try: + callback = resolver.resolve_error_handler(status_code) + response = callback(request, exception=exception) + except Exception: + signals.got_request_exception.send(sender=None, request=request) + response = handle_uncaught_exception(request, resolver, sys.exc_info()) + + return response + + +def handle_uncaught_exception(request, resolver, exc_info): + """ + Processing for any otherwise uncaught exceptions (those that will + generate HTTP 500 responses). + """ + if settings.DEBUG_PROPAGATE_EXCEPTIONS: + raise + + if settings.DEBUG: + return debug.technical_500_response(request, *exc_info) + + # Return an HttpResponse that displays a friendly error message. + callback = resolver.resolve_error_handler(500) + return callback(request) diff --git a/testbed/django__django/django/core/handlers/wsgi.py b/testbed/django__django/django/core/handlers/wsgi.py new file mode 100644 index 0000000000000000000000000000000000000000..9324af083ecdb936dca877a72cb9adbe90b04aa1 --- /dev/null +++ b/testbed/django__django/django/core/handlers/wsgi.py @@ -0,0 +1,207 @@ +from io import IOBase + +from django.conf import settings +from django.core import signals +from django.core.handlers import base +from django.http import HttpRequest, QueryDict, parse_cookie +from django.urls import set_script_prefix +from django.utils.encoding import repercent_broken_unicode +from django.utils.functional import cached_property +from django.utils.regex_helper import _lazy_re_compile + +_slashes_re = _lazy_re_compile(rb"/+") + + +class LimitedStream(IOBase): + """ + Wrap another stream to disallow reading it past a number of bytes. 
+ + Based on the implementation from werkzeug.wsgi.LimitedStream + See https://github.com/pallets/werkzeug/blob/dbf78f67/src/werkzeug/wsgi.py#L828 + """ + + def __init__(self, stream, limit): + self._read = stream.read + self._readline = stream.readline + self._pos = 0 + self.limit = limit + + def read(self, size=-1, /): + _pos = self._pos + limit = self.limit + if _pos >= limit: + return b"" + if size == -1 or size is None: + size = limit - _pos + else: + size = min(size, limit - _pos) + data = self._read(size) + self._pos += len(data) + return data + + def readline(self, size=-1, /): + _pos = self._pos + limit = self.limit + if _pos >= limit: + return b"" + if size == -1 or size is None: + size = limit - _pos + else: + size = min(size, limit - _pos) + line = self._readline(size) + self._pos += len(line) + return line + + +class WSGIRequest(HttpRequest): + def __init__(self, environ): + script_name = get_script_name(environ) + # If PATH_INFO is empty (e.g. accessing the SCRIPT_NAME URL without a + # trailing slash), operate as if '/' was requested. + path_info = get_path_info(environ) or "/" + self.environ = environ + self.path_info = path_info + # be careful to only replace the first slash in the path because of + # http://test/something and http://test//something being different as + # stated in RFC 3986. + self.path = "%s/%s" % (script_name.rstrip("/"), path_info.replace("/", "", 1)) + self.META = environ + self.META["PATH_INFO"] = path_info + self.META["SCRIPT_NAME"] = script_name + self.method = environ["REQUEST_METHOD"].upper() + # Set content_type, content_params, and encoding. 
+ self._set_content_type_params(environ) + try: + content_length = int(environ.get("CONTENT_LENGTH")) + except (ValueError, TypeError): + content_length = 0 + self._stream = LimitedStream(self.environ["wsgi.input"], content_length) + self._read_started = False + self.resolver_match = None + + def _get_scheme(self): + return self.environ.get("wsgi.url_scheme") + + @cached_property + def GET(self): + # The WSGI spec says 'QUERY_STRING' may be absent. + raw_query_string = get_bytes_from_wsgi(self.environ, "QUERY_STRING", "") + return QueryDict(raw_query_string, encoding=self._encoding) + + def _get_post(self): + if not hasattr(self, "_post"): + self._load_post_and_files() + return self._post + + def _set_post(self, post): + self._post = post + + @cached_property + def COOKIES(self): + raw_cookie = get_str_from_wsgi(self.environ, "HTTP_COOKIE", "") + return parse_cookie(raw_cookie) + + @property + def FILES(self): + if not hasattr(self, "_files"): + self._load_post_and_files() + return self._files + + POST = property(_get_post, _set_post) + + +class WSGIHandler(base.BaseHandler): + request_class = WSGIRequest + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.load_middleware() + + def __call__(self, environ, start_response): + set_script_prefix(get_script_name(environ)) + signals.request_started.send(sender=self.__class__, environ=environ) + request = self.request_class(environ) + response = self.get_response(request) + + response._handler_class = self.__class__ + + status = "%d %s" % (response.status_code, response.reason_phrase) + response_headers = [ + *response.items(), + *(("Set-Cookie", c.output(header="")) for c in response.cookies.values()), + ] + start_response(status, response_headers) + if getattr(response, "file_to_stream", None) is not None and environ.get( + "wsgi.file_wrapper" + ): + # If `wsgi.file_wrapper` is used the WSGI server does not call + # .close on the response, but on the file wrapper. 
Patch it to use + # response.close instead which takes care of closing all files. + response.file_to_stream.close = response.close + response = environ["wsgi.file_wrapper"]( + response.file_to_stream, response.block_size + ) + return response + + +def get_path_info(environ): + """Return the HTTP request's PATH_INFO as a string.""" + path_info = get_bytes_from_wsgi(environ, "PATH_INFO", "/") + + return repercent_broken_unicode(path_info).decode() + + +def get_script_name(environ): + """ + Return the equivalent of the HTTP request's SCRIPT_NAME environment + variable. If Apache mod_rewrite is used, return what would have been + the script name prior to any rewriting (so it's the script name as seen + from the client's perspective), unless the FORCE_SCRIPT_NAME setting is + set (to anything). + """ + if settings.FORCE_SCRIPT_NAME is not None: + return settings.FORCE_SCRIPT_NAME + + # If Apache's mod_rewrite had a whack at the URL, Apache set either + # SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any + # rewrites. Unfortunately not every web server (lighttpd!) passes this + # information through all the time, so FORCE_SCRIPT_NAME, above, is still + # needed. + script_url = get_bytes_from_wsgi(environ, "SCRIPT_URL", "") or get_bytes_from_wsgi( + environ, "REDIRECT_URL", "" + ) + + if script_url: + if b"//" in script_url: + # mod_wsgi squashes multiple successive slashes in PATH_INFO, + # do the same with script_url before manipulating paths (#17133). + script_url = _slashes_re.sub(b"/", script_url) + path_info = get_bytes_from_wsgi(environ, "PATH_INFO", "") + script_name = script_url.removesuffix(path_info) + else: + script_name = get_bytes_from_wsgi(environ, "SCRIPT_NAME", "") + + return script_name.decode() + + +def get_bytes_from_wsgi(environ, key, default): + """ + Get a value from the WSGI environ dictionary as bytes. + + key and default should be strings. 
+ """ + value = environ.get(key, default) + # Non-ASCII values in the WSGI environ are arbitrarily decoded with + # ISO-8859-1. This is wrong for Django websites where UTF-8 is the default. + # Re-encode to recover the original bytestring. + return value.encode("iso-8859-1") + + +def get_str_from_wsgi(environ, key, default): + """ + Get a value from the WSGI environ dictionary as str. + + key and default should be str objects. + """ + value = get_bytes_from_wsgi(environ, key, default) + return value.decode(errors="replace") diff --git a/testbed/django__django/django/core/mail/backends/base.py b/testbed/django__django/django/core/mail/backends/base.py new file mode 100644 index 0000000000000000000000000000000000000000..b35b964cb1c62a43f9444ac625f2c165ac37aa5e --- /dev/null +++ b/testbed/django__django/django/core/mail/backends/base.py @@ -0,0 +1,62 @@ +"""Base email backend class.""" + + +class BaseEmailBackend: + """ + Base class for email backend implementations. + + Subclasses must at least overwrite send_messages(). + + open() and close() can be called indirectly by using a backend object as a + context manager: + + with backend as connection: + # do something with connection + pass + """ + + def __init__(self, fail_silently=False, **kwargs): + self.fail_silently = fail_silently + + def open(self): + """ + Open a network connection. + + This method can be overwritten by backend implementations to + open a network connection. + + It's up to the backend implementation to track the status of + a network connection if it's needed by the backend. + + This method can be called by applications to force a single + network connection to be used when sending mails. See the + send_messages() method of the SMTP backend for a reference + implementation. + + The default implementation does nothing. 
+ """ + pass + + def close(self): + """Close a network connection.""" + pass + + def __enter__(self): + try: + self.open() + except Exception: + self.close() + raise + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def send_messages(self, email_messages): + """ + Send one or more EmailMessage objects and return the number of email + messages sent. + """ + raise NotImplementedError( + "subclasses of BaseEmailBackend must override send_messages() method" + ) diff --git a/testbed/django__django/django/core/mail/backends/smtp.py b/testbed/django__django/django/core/mail/backends/smtp.py new file mode 100644 index 0000000000000000000000000000000000000000..1ee48269aee51eccf85c2c1e33b90a30c10d550d --- /dev/null +++ b/testbed/django__django/django/core/mail/backends/smtp.py @@ -0,0 +1,161 @@ +"""SMTP email backend class.""" +import smtplib +import ssl +import threading + +from django.conf import settings +from django.core.mail.backends.base import BaseEmailBackend +from django.core.mail.message import sanitize_address +from django.core.mail.utils import DNS_NAME +from django.utils.functional import cached_property + + +class EmailBackend(BaseEmailBackend): + """ + A wrapper that manages the SMTP network connection. 
+ """ + + def __init__( + self, + host=None, + port=None, + username=None, + password=None, + use_tls=None, + fail_silently=False, + use_ssl=None, + timeout=None, + ssl_keyfile=None, + ssl_certfile=None, + **kwargs, + ): + super().__init__(fail_silently=fail_silently) + self.host = host or settings.EMAIL_HOST + self.port = port or settings.EMAIL_PORT + self.username = settings.EMAIL_HOST_USER if username is None else username + self.password = settings.EMAIL_HOST_PASSWORD if password is None else password + self.use_tls = settings.EMAIL_USE_TLS if use_tls is None else use_tls + self.use_ssl = settings.EMAIL_USE_SSL if use_ssl is None else use_ssl + self.timeout = settings.EMAIL_TIMEOUT if timeout is None else timeout + self.ssl_keyfile = ( + settings.EMAIL_SSL_KEYFILE if ssl_keyfile is None else ssl_keyfile + ) + self.ssl_certfile = ( + settings.EMAIL_SSL_CERTFILE if ssl_certfile is None else ssl_certfile + ) + if self.use_ssl and self.use_tls: + raise ValueError( + "EMAIL_USE_TLS/EMAIL_USE_SSL are mutually exclusive, so only set " + "one of those settings to True." + ) + self.connection = None + self._lock = threading.RLock() + + @property + def connection_class(self): + return smtplib.SMTP_SSL if self.use_ssl else smtplib.SMTP + + @cached_property + def ssl_context(self): + if self.ssl_certfile or self.ssl_keyfile: + ssl_context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS_CLIENT) + ssl_context.load_cert_chain(self.ssl_certfile, self.ssl_keyfile) + return ssl_context + else: + return ssl.create_default_context() + + def open(self): + """ + Ensure an open connection to the email server. Return whether or not a + new connection was required (True or False) or None if an exception + passed silently. + """ + if self.connection: + # Nothing to do if the connection is already open. + return False + + # If local_hostname is not specified, socket.getfqdn() gets used. + # For performance, we use the cached FQDN for local_hostname. 
+ connection_params = {"local_hostname": DNS_NAME.get_fqdn()} + if self.timeout is not None: + connection_params["timeout"] = self.timeout + if self.use_ssl: + connection_params["context"] = self.ssl_context + try: + self.connection = self.connection_class( + self.host, self.port, **connection_params + ) + + # TLS/SSL are mutually exclusive, so only attempt TLS over + # non-secure connections. + if not self.use_ssl and self.use_tls: + self.connection.starttls(context=self.ssl_context) + if self.username and self.password: + self.connection.login(self.username, self.password) + return True + except OSError: + if not self.fail_silently: + raise + + def close(self): + """Close the connection to the email server.""" + if self.connection is None: + return + try: + try: + self.connection.quit() + except (ssl.SSLError, smtplib.SMTPServerDisconnected): + # This happens when calling quit() on a TLS connection + # sometimes, or when the connection was already disconnected + # by the server. + self.connection.close() + except smtplib.SMTPException: + if self.fail_silently: + return + raise + finally: + self.connection = None + + def send_messages(self, email_messages): + """ + Send one or more EmailMessage objects and return the number of email + messages sent. + """ + if not email_messages: + return 0 + with self._lock: + new_conn_created = self.open() + if not self.connection or new_conn_created is None: + # We failed silently on open(). + # Trying to send would be pointless. 
+ return 0 + num_sent = 0 + try: + for message in email_messages: + sent = self._send(message) + if sent: + num_sent += 1 + finally: + if new_conn_created: + self.close() + return num_sent + + def _send(self, email_message): + """A helper method that does the actual sending.""" + if not email_message.recipients(): + return False + encoding = email_message.encoding or settings.DEFAULT_CHARSET + from_email = sanitize_address(email_message.from_email, encoding) + recipients = [ + sanitize_address(addr, encoding) for addr in email_message.recipients() + ] + message = email_message.message() + try: + self.connection.sendmail( + from_email, recipients, message.as_bytes(linesep="\r\n") + ) + except smtplib.SMTPException: + if not self.fail_silently: + raise + return False + return True diff --git a/testbed/django__django/django/core/mail/message.py b/testbed/django__django/django/core/mail/message.py new file mode 100644 index 0000000000000000000000000000000000000000..f3fe6186c7f58ce70f9c2714bde44f22b21cfd41 --- /dev/null +++ b/testbed/django__django/django/core/mail/message.py @@ -0,0 +1,493 @@ +import mimetypes +from email import charset as Charset +from email import encoders as Encoders +from email import generator, message_from_string +from email.errors import HeaderParseError +from email.header import Header +from email.headerregistry import Address, parser +from email.message import Message +from email.mime.base import MIMEBase +from email.mime.message import MIMEMessage +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +from email.utils import formataddr, formatdate, getaddresses, make_msgid +from io import BytesIO, StringIO +from pathlib import Path + +from django.conf import settings +from django.core.mail.utils import DNS_NAME +from django.utils.encoding import force_str, punycode + +# Don't BASE64-encode UTF-8 messages so that we avoid unwanted attention from +# some spam filters. 
+utf8_charset = Charset.Charset("utf-8") +utf8_charset.body_encoding = None # Python defaults to BASE64 +utf8_charset_qp = Charset.Charset("utf-8") +utf8_charset_qp.body_encoding = Charset.QP + +# Default MIME type to use on attachments (if it is not explicitly given +# and cannot be guessed). +DEFAULT_ATTACHMENT_MIME_TYPE = "application/octet-stream" + +RFC5322_EMAIL_LINE_LENGTH_LIMIT = 998 + + +class BadHeaderError(ValueError): + pass + + +# Header names that contain structured address data (RFC 5322). +ADDRESS_HEADERS = { + "from", + "sender", + "reply-to", + "to", + "cc", + "bcc", + "resent-from", + "resent-sender", + "resent-to", + "resent-cc", + "resent-bcc", +} + + +def forbid_multi_line_headers(name, val, encoding): + """Forbid multi-line headers to prevent header injection.""" + encoding = encoding or settings.DEFAULT_CHARSET + val = str(val) # val may be lazy + if "\n" in val or "\r" in val: + raise BadHeaderError( + "Header values can't contain newlines (got %r for header %r)" % (val, name) + ) + try: + val.encode("ascii") + except UnicodeEncodeError: + if name.lower() in ADDRESS_HEADERS: + val = ", ".join( + sanitize_address(addr, encoding) for addr in getaddresses((val,)) + ) + else: + val = Header(val, encoding).encode() + else: + if name.lower() == "subject": + val = Header(val).encode() + return name, val + + +def sanitize_address(addr, encoding): + """ + Format a pair of (name, address) or an email address string. + """ + address = None + if not isinstance(addr, tuple): + addr = force_str(addr) + try: + token, rest = parser.get_mailbox(addr) + except (HeaderParseError, ValueError, IndexError): + raise ValueError('Invalid address "%s"' % addr) + else: + if rest: + # The entire email address must be parsed. 
+ raise ValueError( + 'Invalid address; only %s could be parsed from "%s"' % (token, addr) + ) + nm = token.display_name or "" + localpart = token.local_part + domain = token.domain or "" + else: + nm, address = addr + localpart, domain = address.rsplit("@", 1) + + address_parts = nm + localpart + domain + if "\n" in address_parts or "\r" in address_parts: + raise ValueError("Invalid address; address parts cannot contain newlines.") + + # Avoid UTF-8 encode, if it's possible. + try: + nm.encode("ascii") + nm = Header(nm).encode() + except UnicodeEncodeError: + nm = Header(nm, encoding).encode() + try: + localpart.encode("ascii") + except UnicodeEncodeError: + localpart = Header(localpart, encoding).encode() + domain = punycode(domain) + + parsed_address = Address(username=localpart, domain=domain) + return formataddr((nm, parsed_address.addr_spec)) + + +class MIMEMixin: + def as_string(self, unixfrom=False, linesep="\n"): + """Return the entire formatted message as a string. + Optional `unixfrom' when True, means include the Unix From_ envelope + header. + + This overrides the default as_string() implementation to not mangle + lines that begin with 'From '. See bug #13433 for details. + """ + fp = StringIO() + g = generator.Generator(fp, mangle_from_=False) + g.flatten(self, unixfrom=unixfrom, linesep=linesep) + return fp.getvalue() + + def as_bytes(self, unixfrom=False, linesep="\n"): + """Return the entire formatted message as bytes. + Optional `unixfrom' when True, means include the Unix From_ envelope + header. + + This overrides the default as_bytes() implementation to not mangle + lines that begin with 'From '. See bug #13433 for details. 
+ """ + fp = BytesIO() + g = generator.BytesGenerator(fp, mangle_from_=False) + g.flatten(self, unixfrom=unixfrom, linesep=linesep) + return fp.getvalue() + + +class SafeMIMEMessage(MIMEMixin, MIMEMessage): + def __setitem__(self, name, val): + # message/rfc822 attachments must be ASCII + name, val = forbid_multi_line_headers(name, val, "ascii") + MIMEMessage.__setitem__(self, name, val) + + +class SafeMIMEText(MIMEMixin, MIMEText): + def __init__(self, _text, _subtype="plain", _charset=None): + self.encoding = _charset + MIMEText.__init__(self, _text, _subtype=_subtype, _charset=_charset) + + def __setitem__(self, name, val): + name, val = forbid_multi_line_headers(name, val, self.encoding) + MIMEText.__setitem__(self, name, val) + + def set_payload(self, payload, charset=None): + if charset == "utf-8" and not isinstance(charset, Charset.Charset): + has_long_lines = any( + len(line.encode()) > RFC5322_EMAIL_LINE_LENGTH_LIMIT + for line in payload.splitlines() + ) + # Quoted-Printable encoding has the side effect of shortening long + # lines, if any (#22561). 
+ charset = utf8_charset_qp if has_long_lines else utf8_charset + MIMEText.set_payload(self, payload, charset=charset) + + +class SafeMIMEMultipart(MIMEMixin, MIMEMultipart): + def __init__( + self, _subtype="mixed", boundary=None, _subparts=None, encoding=None, **_params + ): + self.encoding = encoding + MIMEMultipart.__init__(self, _subtype, boundary, _subparts, **_params) + + def __setitem__(self, name, val): + name, val = forbid_multi_line_headers(name, val, self.encoding) + MIMEMultipart.__setitem__(self, name, val) + + +class EmailMessage: + """A container for email information.""" + + content_subtype = "plain" + mixed_subtype = "mixed" + encoding = None # None => use settings default + + def __init__( + self, + subject="", + body="", + from_email=None, + to=None, + bcc=None, + connection=None, + attachments=None, + headers=None, + cc=None, + reply_to=None, + ): + """ + Initialize a single email message (which can be sent to multiple + recipients). + """ + if to: + if isinstance(to, str): + raise TypeError('"to" argument must be a list or tuple') + self.to = list(to) + else: + self.to = [] + if cc: + if isinstance(cc, str): + raise TypeError('"cc" argument must be a list or tuple') + self.cc = list(cc) + else: + self.cc = [] + if bcc: + if isinstance(bcc, str): + raise TypeError('"bcc" argument must be a list or tuple') + self.bcc = list(bcc) + else: + self.bcc = [] + if reply_to: + if isinstance(reply_to, str): + raise TypeError('"reply_to" argument must be a list or tuple') + self.reply_to = list(reply_to) + else: + self.reply_to = [] + self.from_email = from_email or settings.DEFAULT_FROM_EMAIL + self.subject = subject + self.body = body or "" + self.attachments = [] + if attachments: + for attachment in attachments: + if isinstance(attachment, MIMEBase): + self.attach(attachment) + else: + self.attach(*attachment) + self.extra_headers = headers or {} + self.connection = connection + + def get_connection(self, fail_silently=False): + from django.core.mail 
import get_connection + + if not self.connection: + self.connection = get_connection(fail_silently=fail_silently) + return self.connection + + def message(self): + encoding = self.encoding or settings.DEFAULT_CHARSET + msg = SafeMIMEText(self.body, self.content_subtype, encoding) + msg = self._create_message(msg) + msg["Subject"] = self.subject + msg["From"] = self.extra_headers.get("From", self.from_email) + self._set_list_header_if_not_empty(msg, "To", self.to) + self._set_list_header_if_not_empty(msg, "Cc", self.cc) + self._set_list_header_if_not_empty(msg, "Reply-To", self.reply_to) + + # Email header names are case-insensitive (RFC 2045), so we have to + # accommodate that when doing comparisons. + header_names = [key.lower() for key in self.extra_headers] + if "date" not in header_names: + # formatdate() uses stdlib methods to format the date, which use + # the stdlib/OS concept of a timezone, however, Django sets the + # TZ environment variable based on the TIME_ZONE setting which + # will get picked up by formatdate(). + msg["Date"] = formatdate(localtime=settings.EMAIL_USE_LOCALTIME) + if "message-id" not in header_names: + # Use cached DNS_NAME for performance + msg["Message-ID"] = make_msgid(domain=DNS_NAME) + for name, value in self.extra_headers.items(): + if name.lower() != "from": # From is already handled + msg[name] = value + return msg + + def recipients(self): + """ + Return a list of all recipients of the email (includes direct + addressees as well as Cc and Bcc entries). + """ + return [email for email in (self.to + self.cc + self.bcc) if email] + + def send(self, fail_silently=False): + """Send the email message.""" + if not self.recipients(): + # Don't bother creating the network connection if there's nobody to + # send to. + return 0 + return self.get_connection(fail_silently).send_messages([self]) + + def attach(self, filename=None, content=None, mimetype=None): + """ + Attach a file with the given filename and content. 
The filename can + be omitted and the mimetype is guessed, if not provided. + + If the first parameter is a MIMEBase subclass, insert it directly + into the resulting message attachments. + + For a text/* mimetype (guessed or specified), when a bytes object is + specified as content, decode it as UTF-8. If that fails, set the + mimetype to DEFAULT_ATTACHMENT_MIME_TYPE and don't decode the content. + """ + if isinstance(filename, MIMEBase): + if content is not None or mimetype is not None: + raise ValueError( + "content and mimetype must not be given when a MIMEBase " + "instance is provided." + ) + self.attachments.append(filename) + elif content is None: + raise ValueError("content must be provided.") + else: + mimetype = ( + mimetype + or mimetypes.guess_type(filename)[0] + or DEFAULT_ATTACHMENT_MIME_TYPE + ) + basetype, subtype = mimetype.split("/", 1) + + if basetype == "text": + if isinstance(content, bytes): + try: + content = content.decode() + except UnicodeDecodeError: + # If mimetype suggests the file is text but it's + # actually binary, read() raises a UnicodeDecodeError. + mimetype = DEFAULT_ATTACHMENT_MIME_TYPE + + self.attachments.append((filename, content, mimetype)) + + def attach_file(self, path, mimetype=None): + """ + Attach a file from the filesystem. + + Set the mimetype to DEFAULT_ATTACHMENT_MIME_TYPE if it isn't specified + and cannot be guessed. + + For a text/* mimetype (guessed or specified), decode the file's content + as UTF-8. If that fails, set the mimetype to + DEFAULT_ATTACHMENT_MIME_TYPE and don't decode the content. 
+ """ + path = Path(path) + with path.open("rb") as file: + content = file.read() + self.attach(path.name, content, mimetype) + + def _create_message(self, msg): + return self._create_attachments(msg) + + def _create_attachments(self, msg): + if self.attachments: + encoding = self.encoding or settings.DEFAULT_CHARSET + body_msg = msg + msg = SafeMIMEMultipart(_subtype=self.mixed_subtype, encoding=encoding) + if self.body or body_msg.is_multipart(): + msg.attach(body_msg) + for attachment in self.attachments: + if isinstance(attachment, MIMEBase): + msg.attach(attachment) + else: + msg.attach(self._create_attachment(*attachment)) + return msg + + def _create_mime_attachment(self, content, mimetype): + """ + Convert the content, mimetype pair into a MIME attachment object. + + If the mimetype is message/rfc822, content may be an + email.Message or EmailMessage object, as well as a str. + """ + basetype, subtype = mimetype.split("/", 1) + if basetype == "text": + encoding = self.encoding or settings.DEFAULT_CHARSET + attachment = SafeMIMEText(content, subtype, encoding) + elif basetype == "message" and subtype == "rfc822": + # Bug #18967: Per RFC 2046 Section 5.2.1, message/rfc822 + # attachments must not be base64 encoded. + if isinstance(content, EmailMessage): + # convert content into an email.Message first + content = content.message() + elif not isinstance(content, Message): + # For compatibility with existing code, parse the message + # into an email.Message object if it is not one already. + content = message_from_string(force_str(content)) + + attachment = SafeMIMEMessage(content, subtype) + else: + # Encode non-text attachments with base64. + attachment = MIMEBase(basetype, subtype) + attachment.set_payload(content) + Encoders.encode_base64(attachment) + return attachment + + def _create_attachment(self, filename, content, mimetype=None): + """ + Convert the filename, content, mimetype triple into a MIME attachment + object. 
+ """ + attachment = self._create_mime_attachment(content, mimetype) + if filename: + try: + filename.encode("ascii") + except UnicodeEncodeError: + filename = ("utf-8", "", filename) + attachment.add_header( + "Content-Disposition", "attachment", filename=filename + ) + return attachment + + def _set_list_header_if_not_empty(self, msg, header, values): + """ + Set msg's header, either from self.extra_headers, if present, or from + the values argument. + """ + if values: + try: + value = self.extra_headers[header] + except KeyError: + value = ", ".join(str(v) for v in values) + msg[header] = value + + +class EmailMultiAlternatives(EmailMessage): + """ + A version of EmailMessage that makes it easy to send multipart/alternative + messages. For example, including text and HTML versions of the text is + made easier. + """ + + alternative_subtype = "alternative" + + def __init__( + self, + subject="", + body="", + from_email=None, + to=None, + bcc=None, + connection=None, + attachments=None, + headers=None, + alternatives=None, + cc=None, + reply_to=None, + ): + """ + Initialize a single email message (which can be sent to multiple + recipients). 
+ """ + super().__init__( + subject, + body, + from_email, + to, + bcc, + connection, + attachments, + headers, + cc, + reply_to, + ) + self.alternatives = alternatives or [] + + def attach_alternative(self, content, mimetype): + """Attach an alternative content representation.""" + if content is None or mimetype is None: + raise ValueError("Both content and mimetype must be provided.") + self.alternatives.append((content, mimetype)) + + def _create_message(self, msg): + return self._create_attachments(self._create_alternatives(msg)) + + def _create_alternatives(self, msg): + encoding = self.encoding or settings.DEFAULT_CHARSET + if self.alternatives: + body_msg = msg + msg = SafeMIMEMultipart( + _subtype=self.alternative_subtype, encoding=encoding + ) + if self.body: + msg.attach(body_msg) + for alternative in self.alternatives: + msg.attach(self._create_mime_attachment(*alternative)) + return msg diff --git a/testbed/django__django/django/core/management/commands/dbshell.py b/testbed/django__django/django/core/management/commands/dbshell.py new file mode 100644 index 0000000000000000000000000000000000000000..bdb130594faf455325adfc8aa579acb4266707b1 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/dbshell.py @@ -0,0 +1,48 @@ +import subprocess + +from django.core.management.base import BaseCommand, CommandError +from django.db import DEFAULT_DB_ALIAS, connections + + +class Command(BaseCommand): + help = ( + "Runs the command-line client for specified database, or the " + "default database if none is provided." + ) + + requires_system_checks = [] + + def add_arguments(self, parser): + parser.add_argument( + "--database", + default=DEFAULT_DB_ALIAS, + help=( + "Nominates a database onto which to open a shell. Defaults to the " + '"default" database.' 
+ ), + ) + parameters = parser.add_argument_group("parameters", prefix_chars="--") + parameters.add_argument("parameters", nargs="*") + + def handle(self, **options): + connection = connections[options["database"]] + try: + connection.client.runshell(options["parameters"]) + except FileNotFoundError: + # Note that we're assuming the FileNotFoundError relates to the + # command missing. It could be raised for some other reason, in + # which case this error message would be inaccurate. Still, this + # message catches the common case. + raise CommandError( + "You appear not to have the %r program installed or on your path." + % connection.client.executable_name + ) + except subprocess.CalledProcessError as e: + raise CommandError( + '"%s" returned non-zero exit status %s.' + % ( + " ".join(map(str, e.cmd)), + e.returncode, + ), + returncode=e.returncode, + ) diff --git a/testbed/django__django/django/core/management/commands/sendtestemail.py b/testbed/django__django/django/core/management/commands/sendtestemail.py new file mode 100644 index 0000000000000000000000000000000000000000..fbb2a7856eb9041c3ea739f30d374d35215f7f36 --- /dev/null +++ b/testbed/django__django/django/core/management/commands/sendtestemail.py @@ -0,0 +1,46 @@ +import socket + +from django.core.mail import mail_admins, mail_managers, send_mail +from django.core.management.base import BaseCommand +from django.utils import timezone + + +class Command(BaseCommand): + help = "Sends a test email to the email addresses specified as arguments." + missing_args_message = ( + "You must specify some email recipients, or pass the --managers or --admin " + "options." 
+ ) + + def add_arguments(self, parser): + parser.add_argument( + "email", + nargs="*", + help="One or more email addresses to send a test email to.", + ) + parser.add_argument( + "--managers", + action="store_true", + help="Send a test email to the addresses specified in settings.MANAGERS.", + ) + parser.add_argument( + "--admins", + action="store_true", + help="Send a test email to the addresses specified in settings.ADMINS.", + ) + + def handle(self, *args, **kwargs): + subject = "Test email from %s on %s" % (socket.gethostname(), timezone.now()) + + send_mail( + subject=subject, + message="If you're reading this, it was successful.", + from_email=None, + recipient_list=kwargs["email"], + ) + + if kwargs["managers"]: + mail_managers(subject, "This email was sent to the site managers.") + + if kwargs["admins"]: + mail_admins(subject, "This email was sent to the site admins.") diff --git a/testbed/django__django/django/forms/__init__.py b/testbed/django__django/django/forms/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1c319219a636729dc3c8f675b3628b06661c71fe --- /dev/null +++ b/testbed/django__django/django/forms/__init__.py @@ -0,0 +1,11 @@ +""" +Django validation and HTML form handling. 
+""" + +from django.core.exceptions import ValidationError # NOQA +from django.forms.boundfield import * # NOQA +from django.forms.fields import * # NOQA +from django.forms.forms import * # NOQA +from django.forms.formsets import * # NOQA +from django.forms.models import * # NOQA +from django.forms.widgets import * # NOQA diff --git a/testbed/django__django/django/forms/boundfield.py b/testbed/django__django/django/forms/boundfield.py new file mode 100644 index 0000000000000000000000000000000000000000..deba7393296d0ae23fe3a3159c9a80251f25e9fb --- /dev/null +++ b/testbed/django__django/django/forms/boundfield.py @@ -0,0 +1,352 @@ +import re + +from django.core.exceptions import ValidationError +from django.forms.utils import RenderableFieldMixin, pretty_name +from django.forms.widgets import MultiWidget, Textarea, TextInput +from django.utils.functional import cached_property +from django.utils.html import format_html, html_safe +from django.utils.translation import gettext_lazy as _ + +__all__ = ("BoundField",) + + +class BoundField(RenderableFieldMixin): + "A Field plus data" + + def __init__(self, form, field, name): + self.form = form + self.field = field + self.name = name + self.html_name = form.add_prefix(name) + self.html_initial_name = form.add_initial_prefix(name) + self.html_initial_id = form.add_initial_prefix(self.auto_id) + if self.field.label is None: + self.label = pretty_name(name) + else: + self.label = self.field.label + self.help_text = field.help_text or "" + self.renderer = form.renderer + + @cached_property + def subwidgets(self): + """ + Most widgets yield a single subwidget, but others like RadioSelect and + CheckboxSelectMultiple produce one subwidget for each choice. + + This property is cached so that only one database query occurs when + rendering ModelChoiceFields. 
+ """ + id_ = self.field.widget.attrs.get("id") or self.auto_id + attrs = {"id": id_} if id_ else {} + attrs = self.build_widget_attrs(attrs) + return [ + BoundWidget(self.field.widget, widget, self.form.renderer) + for widget in self.field.widget.subwidgets( + self.html_name, self.value(), attrs=attrs + ) + ] + + def __bool__(self): + # BoundField evaluates to True even if it doesn't have subwidgets. + return True + + def __iter__(self): + return iter(self.subwidgets) + + def __len__(self): + return len(self.subwidgets) + + def __getitem__(self, idx): + # Prevent unnecessary reevaluation when accessing BoundField's attrs + # from templates. + if not isinstance(idx, (int, slice)): + raise TypeError( + "BoundField indices must be integers or slices, not %s." + % type(idx).__name__ + ) + return self.subwidgets[idx] + + @property + def errors(self): + """ + Return an ErrorList (empty if there are no errors) for this field. + """ + return self.form.errors.get( + self.name, self.form.error_class(renderer=self.form.renderer) + ) + + @property + def template_name(self): + return self.field.template_name or self.form.renderer.field_template_name + + def get_context(self): + return {"field": self} + + def as_widget(self, widget=None, attrs=None, only_initial=False): + """ + Render the field by rendering the passed widget, adding any HTML + attributes passed as attrs. If a widget isn't specified, use the + field's default widget. + """ + widget = widget or self.field.widget + if self.field.localize: + widget.is_localized = True + attrs = attrs or {} + attrs = self.build_widget_attrs(attrs, widget) + if self.auto_id and "id" not in widget.attrs: + attrs.setdefault( + "id", self.html_initial_id if only_initial else self.auto_id + ) + if only_initial and self.html_initial_name in self.form.data: + # Propagate the hidden initial value. 
+ value = self.form._widget_data_value( + self.field.hidden_widget(), + self.html_initial_name, + ) + else: + value = self.value() + return widget.render( + name=self.html_initial_name if only_initial else self.html_name, + value=value, + attrs=attrs, + renderer=self.form.renderer, + ) + + def as_text(self, attrs=None, **kwargs): + """ + Return a string of HTML for representing this as an . + """ + return self.as_widget(TextInput(), attrs, **kwargs) + + def as_textarea(self, attrs=None, **kwargs): + """Return a string of HTML for representing this as a diff --git a/testbed/django__django/django/forms/jinja2/django/forms/widgets/url.html b/testbed/django__django/django/forms/jinja2/django/forms/widgets/url.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/jinja2/django/forms/widgets/url.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/models.py b/testbed/django__django/django/forms/models.py new file mode 100644 index 0000000000000000000000000000000000000000..dc30d79b5d3d1f76c4766d38c9deebd2b9ee53f3 --- /dev/null +++ b/testbed/django__django/django/forms/models.py @@ -0,0 +1,1674 @@ +""" +Helper functions for creating Form classes from Django models +and database field objects. 
+""" +from itertools import chain + +from django.core.exceptions import ( + NON_FIELD_ERRORS, + FieldError, + ImproperlyConfigured, + ValidationError, +) +from django.db.models.utils import AltersData +from django.forms.fields import ChoiceField, Field +from django.forms.forms import BaseForm, DeclarativeFieldsMetaclass +from django.forms.formsets import BaseFormSet, formset_factory +from django.forms.utils import ErrorList +from django.forms.widgets import ( + HiddenInput, + MultipleHiddenInput, + RadioSelect, + SelectMultiple, +) +from django.utils.text import capfirst, get_text_list +from django.utils.translation import gettext +from django.utils.translation import gettext_lazy as _ + +__all__ = ( + "ModelForm", + "BaseModelForm", + "model_to_dict", + "fields_for_model", + "ModelChoiceField", + "ModelMultipleChoiceField", + "ALL_FIELDS", + "BaseModelFormSet", + "modelformset_factory", + "BaseInlineFormSet", + "inlineformset_factory", + "modelform_factory", +) + +ALL_FIELDS = "__all__" + + +def construct_instance(form, instance, fields=None, exclude=None): + """ + Construct and return a model instance from the bound ``form``'s + ``cleaned_data``, but do not save the returned instance to the database. + """ + from django.db import models + + opts = instance._meta + + cleaned_data = form.cleaned_data + file_field_list = [] + for f in opts.fields: + if ( + not f.editable + or isinstance(f, models.AutoField) + or f.name not in cleaned_data + ): + continue + if fields is not None and f.name not in fields: + continue + if exclude and f.name in exclude: + continue + # Leave defaults for fields that aren't in POST data, except for + # checkbox inputs because they don't appear in POST data if not checked. 
+ if ( + f.has_default() + and form[f.name].field.widget.value_omitted_from_data( + form.data, form.files, form.add_prefix(f.name) + ) + and cleaned_data.get(f.name) in form[f.name].field.empty_values + ): + continue + # Defer saving file-type fields until after the other fields, so a + # callable upload_to can use the values from other fields. + if isinstance(f, models.FileField): + file_field_list.append(f) + else: + f.save_form_data(instance, cleaned_data[f.name]) + + for f in file_field_list: + f.save_form_data(instance, cleaned_data[f.name]) + + return instance + + +# ModelForms ################################################################# + + +def model_to_dict(instance, fields=None, exclude=None): + """ + Return a dict containing the data in ``instance`` suitable for passing as + a Form's ``initial`` keyword argument. + + ``fields`` is an optional list of field names. If provided, return only the + named. + + ``exclude`` is an optional list of field names. If provided, exclude the + named from the returned dict, even if they are listed in the ``fields`` + argument. + """ + opts = instance._meta + data = {} + for f in chain(opts.concrete_fields, opts.private_fields, opts.many_to_many): + if not getattr(f, "editable", False): + continue + if fields is not None and f.name not in fields: + continue + if exclude and f.name in exclude: + continue + data[f.name] = f.value_from_object(instance) + return data + + +def apply_limit_choices_to_to_formfield(formfield): + """Apply limit_choices_to to the formfield's queryset if needed.""" + from django.db.models import Exists, OuterRef, Q + + if hasattr(formfield, "queryset") and hasattr(formfield, "get_limit_choices_to"): + limit_choices_to = formfield.get_limit_choices_to() + if limit_choices_to: + complex_filter = limit_choices_to + if not isinstance(complex_filter, Q): + complex_filter = Q(**limit_choices_to) + complex_filter &= Q(pk=OuterRef("pk")) + # Use Exists() to avoid potential duplicates. 
+ formfield.queryset = formfield.queryset.filter( + Exists(formfield.queryset.model._base_manager.filter(complex_filter)), + ) + + +def fields_for_model( + model, + fields=None, + exclude=None, + widgets=None, + formfield_callback=None, + localized_fields=None, + labels=None, + help_texts=None, + error_messages=None, + field_classes=None, + *, + apply_limit_choices_to=True, + form_declared_fields=None, +): + """ + Return a dictionary containing form fields for the given model. + + ``fields`` is an optional list of field names. If provided, return only the + named fields. + + ``exclude`` is an optional list of field names. If provided, exclude the + named fields from the returned fields, even if they are listed in the + ``fields`` argument. + + ``widgets`` is a dictionary of model field names mapped to a widget. + + ``formfield_callback`` is a callable that takes a model field and returns + a form field. + + ``localized_fields`` is a list of names of fields which should be localized. + + ``labels`` is a dictionary of model field names mapped to a label. + + ``help_texts`` is a dictionary of model field names mapped to a help text. + + ``error_messages`` is a dictionary of model field names mapped to a + dictionary of error messages. + + ``field_classes`` is a dictionary of model field names mapped to a form + field class. + + ``apply_limit_choices_to`` is a boolean indicating if limit_choices_to + should be applied to a field's queryset. + + ``form_declared_fields`` is a dictionary of form fields created directly on + a form. 
+ """ + form_declared_fields = form_declared_fields or {} + field_dict = {} + ignored = [] + opts = model._meta + # Avoid circular import + from django.db.models import Field as ModelField + + sortable_private_fields = [ + f for f in opts.private_fields if isinstance(f, ModelField) + ] + for f in sorted( + chain(opts.concrete_fields, sortable_private_fields, opts.many_to_many) + ): + if not getattr(f, "editable", False): + if ( + fields is not None + and f.name in fields + and (exclude is None or f.name not in exclude) + ): + raise FieldError( + "'%s' cannot be specified for %s model form as it is a " + "non-editable field" % (f.name, model.__name__) + ) + continue + if fields is not None and f.name not in fields: + continue + if exclude and f.name in exclude: + continue + if f.name in form_declared_fields: + field_dict[f.name] = form_declared_fields[f.name] + continue + + kwargs = {} + if widgets and f.name in widgets: + kwargs["widget"] = widgets[f.name] + if localized_fields == ALL_FIELDS or ( + localized_fields and f.name in localized_fields + ): + kwargs["localize"] = True + if labels and f.name in labels: + kwargs["label"] = labels[f.name] + if help_texts and f.name in help_texts: + kwargs["help_text"] = help_texts[f.name] + if error_messages and f.name in error_messages: + kwargs["error_messages"] = error_messages[f.name] + if field_classes and f.name in field_classes: + kwargs["form_class"] = field_classes[f.name] + + if formfield_callback is None: + formfield = f.formfield(**kwargs) + elif not callable(formfield_callback): + raise TypeError("formfield_callback must be a function or callable") + else: + formfield = formfield_callback(f, **kwargs) + + if formfield: + if apply_limit_choices_to: + apply_limit_choices_to_to_formfield(formfield) + field_dict[f.name] = formfield + else: + ignored.append(f.name) + if fields: + field_dict = { + f: field_dict.get(f) + for f in fields + if (not exclude or f not in exclude) and f not in ignored + } + return field_dict 
+ + +class ModelFormOptions: + def __init__(self, options=None): + self.model = getattr(options, "model", None) + self.fields = getattr(options, "fields", None) + self.exclude = getattr(options, "exclude", None) + self.widgets = getattr(options, "widgets", None) + self.localized_fields = getattr(options, "localized_fields", None) + self.labels = getattr(options, "labels", None) + self.help_texts = getattr(options, "help_texts", None) + self.error_messages = getattr(options, "error_messages", None) + self.field_classes = getattr(options, "field_classes", None) + self.formfield_callback = getattr(options, "formfield_callback", None) + + +class ModelFormMetaclass(DeclarativeFieldsMetaclass): + def __new__(mcs, name, bases, attrs): + new_class = super().__new__(mcs, name, bases, attrs) + + if bases == (BaseModelForm,): + return new_class + + opts = new_class._meta = ModelFormOptions(getattr(new_class, "Meta", None)) + + # We check if a string was passed to `fields` or `exclude`, + # which is likely to be a mistake where the user typed ('foo') instead + # of ('foo',) + for opt in ["fields", "exclude", "localized_fields"]: + value = getattr(opts, opt) + if isinstance(value, str) and value != ALL_FIELDS: + msg = ( + "%(model)s.Meta.%(opt)s cannot be a string. " + "Did you mean to type: ('%(value)s',)?" + % { + "model": new_class.__name__, + "opt": opt, + "value": value, + } + ) + raise TypeError(msg) + + if opts.model: + # If a model is defined, extract form fields from it. + if opts.fields is None and opts.exclude is None: + raise ImproperlyConfigured( + "Creating a ModelForm without either the 'fields' attribute " + "or the 'exclude' attribute is prohibited; form %s " + "needs updating." 
% name + ) + + if opts.fields == ALL_FIELDS: + # Sentinel for fields_for_model to indicate "get the list of + # fields from the model" + opts.fields = None + + fields = fields_for_model( + opts.model, + opts.fields, + opts.exclude, + opts.widgets, + opts.formfield_callback, + opts.localized_fields, + opts.labels, + opts.help_texts, + opts.error_messages, + opts.field_classes, + # limit_choices_to will be applied during ModelForm.__init__(). + apply_limit_choices_to=False, + form_declared_fields=new_class.declared_fields, + ) + + # make sure opts.fields doesn't specify an invalid field + none_model_fields = {k for k, v in fields.items() if not v} + missing_fields = none_model_fields.difference(new_class.declared_fields) + if missing_fields: + message = "Unknown field(s) (%s) specified for %s" + message %= (", ".join(missing_fields), opts.model.__name__) + raise FieldError(message) + # Include all the other declared fields. + fields.update(new_class.declared_fields) + else: + fields = new_class.declared_fields + + new_class.base_fields = fields + + return new_class + + +class BaseModelForm(BaseForm, AltersData): + def __init__( + self, + data=None, + files=None, + auto_id="id_%s", + prefix=None, + initial=None, + error_class=ErrorList, + label_suffix=None, + empty_permitted=False, + instance=None, + use_required_attribute=None, + renderer=None, + ): + opts = self._meta + if opts.model is None: + raise ValueError("ModelForm has no model class specified.") + if instance is None: + # if we didn't get an instance, instantiate a new one + self.instance = opts.model() + object_data = {} + else: + self.instance = instance + object_data = model_to_dict(instance, opts.fields, opts.exclude) + # if initial was provided, it should override the values from instance + if initial is not None: + object_data.update(initial) + # self._validate_unique will be set to True by BaseModelForm.clean(). 
+ # It is False by default so overriding self.clean() and failing to call + # super will stop validate_unique from being called. + self._validate_unique = False + super().__init__( + data, + files, + auto_id, + prefix, + object_data, + error_class, + label_suffix, + empty_permitted, + use_required_attribute=use_required_attribute, + renderer=renderer, + ) + for formfield in self.fields.values(): + apply_limit_choices_to_to_formfield(formfield) + + def _get_validation_exclusions(self): + """ + For backwards-compatibility, exclude several types of fields from model + validation. See tickets #12507, #12521, #12553. + """ + exclude = set() + # Build up a list of fields that should be excluded from model field + # validation and unique checks. + for f in self.instance._meta.fields: + field = f.name + # Exclude fields that aren't on the form. The developer may be + # adding these values to the model after form validation. + if field not in self.fields: + exclude.add(f.name) + + # Don't perform model validation on fields that were defined + # manually on the form and excluded via the ModelForm's Meta + # class. See #12901. + elif self._meta.fields and field not in self._meta.fields: + exclude.add(f.name) + elif self._meta.exclude and field in self._meta.exclude: + exclude.add(f.name) + + # Exclude fields that failed form validation. There's no need for + # the model fields to validate them as well. + elif field in self._errors: + exclude.add(f.name) + + # Exclude empty fields that are not required by the form, if the + # underlying model field is required. This keeps the model field + # from raising a required error. Note: don't exclude the field from + # validation if the model field allows blanks. If it does, the blank + # value may be included in a unique check, so cannot be excluded + # from validation. 
+ else: + form_field = self.fields[field] + field_value = self.cleaned_data.get(field) + if ( + not f.blank + and not form_field.required + and field_value in form_field.empty_values + ): + exclude.add(f.name) + return exclude + + def clean(self): + self._validate_unique = True + return self.cleaned_data + + def _update_errors(self, errors): + # Override any validation error messages defined at the model level + # with those defined at the form level. + opts = self._meta + + # Allow the model generated by construct_instance() to raise + # ValidationError and have them handled in the same way as others. + if hasattr(errors, "error_dict"): + error_dict = errors.error_dict + else: + error_dict = {NON_FIELD_ERRORS: errors} + + for field, messages in error_dict.items(): + if ( + field == NON_FIELD_ERRORS + and opts.error_messages + and NON_FIELD_ERRORS in opts.error_messages + ): + error_messages = opts.error_messages[NON_FIELD_ERRORS] + elif field in self.fields: + error_messages = self.fields[field].error_messages + else: + continue + + for message in messages: + if ( + isinstance(message, ValidationError) + and message.code in error_messages + ): + message.message = error_messages[message.code] + + self.add_error(None, errors) + + def _post_clean(self): + opts = self._meta + + exclude = self._get_validation_exclusions() + + # Foreign Keys being used to represent inline relationships + # are excluded from basic field value validation. This is for two + # reasons: firstly, the value may not be supplied (#12507; the + # case of providing new values to the admin); secondly the + # object being referred to may not yet fully exist (#12749). + # However, these fields *must* be included in uniqueness checks, + # so this can't be part of _get_validation_exclusions(). 
+ for name, field in self.fields.items(): + if isinstance(field, InlineForeignKeyField): + exclude.add(name) + + try: + self.instance = construct_instance( + self, self.instance, opts.fields, opts.exclude + ) + except ValidationError as e: + self._update_errors(e) + + try: + self.instance.full_clean(exclude=exclude, validate_unique=False) + except ValidationError as e: + self._update_errors(e) + + # Validate uniqueness if needed. + if self._validate_unique: + self.validate_unique() + + def validate_unique(self): + """ + Call the instance's validate_unique() method and update the form's + validation errors if any were raised. + """ + exclude = self._get_validation_exclusions() + try: + self.instance.validate_unique(exclude=exclude) + except ValidationError as e: + self._update_errors(e) + + def _save_m2m(self): + """ + Save the many-to-many fields and generic relations for this form. + """ + cleaned_data = self.cleaned_data + exclude = self._meta.exclude + fields = self._meta.fields + opts = self.instance._meta + # Note that for historical reasons we want to include also + # private_fields here. (GenericRelation was previously a fake + # m2m field). + for f in chain(opts.many_to_many, opts.private_fields): + if not hasattr(f, "save_form_data"): + continue + if fields and f.name not in fields: + continue + if exclude and f.name in exclude: + continue + if f.name in cleaned_data: + f.save_form_data(self.instance, cleaned_data[f.name]) + + def save(self, commit=True): + """ + Save this form's self.instance object if commit=True. Otherwise, add + a save_m2m() method to the form which can be called after the instance + is saved manually at a later time. Return the model instance. + """ + if self.errors: + raise ValueError( + "The %s could not be %s because the data didn't validate." + % ( + self.instance._meta.object_name, + "created" if self.instance._state.adding else "changed", + ) + ) + if commit: + # If committing, save the instance and the m2m data immediately. 
+ self.instance.save() + self._save_m2m() + else: + # If not committing, add a method to the form to allow deferred + # saving of m2m data. + self.save_m2m = self._save_m2m + return self.instance + + save.alters_data = True + + +class ModelForm(BaseModelForm, metaclass=ModelFormMetaclass): + pass + + +def modelform_factory( + model, + form=ModelForm, + fields=None, + exclude=None, + formfield_callback=None, + widgets=None, + localized_fields=None, + labels=None, + help_texts=None, + error_messages=None, + field_classes=None, +): + """ + Return a ModelForm containing form fields for the given model. You can + optionally pass a `form` argument to use as a starting point for + constructing the ModelForm. + + ``fields`` is an optional list of field names. If provided, include only + the named fields in the returned fields. If omitted or '__all__', use all + fields. + + ``exclude`` is an optional list of field names. If provided, exclude the + named fields from the returned fields, even if they are listed in the + ``fields`` argument. + + ``widgets`` is a dictionary of model field names mapped to a widget. + + ``localized_fields`` is a list of names of fields which should be localized. + + ``formfield_callback`` is a callable that takes a model field and returns + a form field. + + ``labels`` is a dictionary of model field names mapped to a label. + + ``help_texts`` is a dictionary of model field names mapped to a help text. + + ``error_messages`` is a dictionary of model field names mapped to a + dictionary of error messages. + + ``field_classes`` is a dictionary of model field names mapped to a form + field class. + """ + # Create the inner Meta class. FIXME: ideally, we should be able to + # construct a ModelForm without creating and passing in a temporary + # inner class. + + # Build up a list of attributes that the Meta object will have. 
+ attrs = {"model": model} + if fields is not None: + attrs["fields"] = fields + if exclude is not None: + attrs["exclude"] = exclude + if widgets is not None: + attrs["widgets"] = widgets + if localized_fields is not None: + attrs["localized_fields"] = localized_fields + if labels is not None: + attrs["labels"] = labels + if help_texts is not None: + attrs["help_texts"] = help_texts + if error_messages is not None: + attrs["error_messages"] = error_messages + if field_classes is not None: + attrs["field_classes"] = field_classes + + # If parent form class already has an inner Meta, the Meta we're + # creating needs to inherit from the parent's inner meta. + bases = (form.Meta,) if hasattr(form, "Meta") else () + Meta = type("Meta", bases, attrs) + if formfield_callback: + Meta.formfield_callback = staticmethod(formfield_callback) + # Give this new form class a reasonable name. + class_name = model.__name__ + "Form" + + # Class attributes for the new form class. + form_class_attrs = {"Meta": Meta} + + if getattr(Meta, "fields", None) is None and getattr(Meta, "exclude", None) is None: + raise ImproperlyConfigured( + "Calling modelform_factory without defining 'fields' or " + "'exclude' explicitly is prohibited." + ) + + # Instantiate type(form) in order to use the same metaclass as form. + return type(form)(class_name, (form,), form_class_attrs) + + +# ModelFormSets ############################################################## + + +class BaseModelFormSet(BaseFormSet, AltersData): + """ + A ``FormSet`` for editing a queryset and/or adding new objects to it. + """ + + model = None + edit_only = False + + # Set of fields that must be unique among forms of this set. 
+ unique_fields = set() + + def __init__( + self, + data=None, + files=None, + auto_id="id_%s", + prefix=None, + queryset=None, + *, + initial=None, + **kwargs, + ): + self.queryset = queryset + self.initial_extra = initial + super().__init__( + **{ + "data": data, + "files": files, + "auto_id": auto_id, + "prefix": prefix, + **kwargs, + } + ) + + def initial_form_count(self): + """Return the number of forms that are required in this FormSet.""" + if not self.is_bound: + return len(self.get_queryset()) + return super().initial_form_count() + + def _existing_object(self, pk): + if not hasattr(self, "_object_dict"): + self._object_dict = {o.pk: o for o in self.get_queryset()} + return self._object_dict.get(pk) + + def _get_to_python(self, field): + """ + If the field is a related field, fetch the concrete field's (that + is, the ultimate pointed-to field's) to_python. + """ + while field.remote_field is not None: + field = field.remote_field.get_related_field() + return field.to_python + + def _construct_form(self, i, **kwargs): + pk_required = i < self.initial_form_count() + if pk_required: + if self.is_bound: + pk_key = "%s-%s" % (self.add_prefix(i), self.model._meta.pk.name) + try: + pk = self.data[pk_key] + except KeyError: + # The primary key is missing. The user may have tampered + # with POST data. + pass + else: + to_python = self._get_to_python(self.model._meta.pk) + try: + pk = to_python(pk) + except ValidationError: + # The primary key exists but is an invalid value. The + # user may have tampered with POST data. 
+ pass + else: + kwargs["instance"] = self._existing_object(pk) + else: + kwargs["instance"] = self.get_queryset()[i] + elif self.initial_extra: + # Set initial values for extra forms + try: + kwargs["initial"] = self.initial_extra[i - self.initial_form_count()] + except IndexError: + pass + form = super()._construct_form(i, **kwargs) + if pk_required: + form.fields[self.model._meta.pk.name].required = True + return form + + def get_queryset(self): + if not hasattr(self, "_queryset"): + if self.queryset is not None: + qs = self.queryset + else: + qs = self.model._default_manager.get_queryset() + + # If the queryset isn't already ordered we need to add an + # artificial ordering here to make sure that all formsets + # constructed from this queryset have the same form order. + if not qs.ordered: + qs = qs.order_by(self.model._meta.pk.name) + + # Removed queryset limiting here. As per discussion re: #13023 + # on django-dev, max_num should not prevent existing + # related objects/inlines from being displayed. + self._queryset = qs + return self._queryset + + def save_new(self, form, commit=True): + """Save and return a new model instance for the given form.""" + return form.save(commit=commit) + + def save_existing(self, form, obj, commit=True): + """Save and return an existing model instance for the given form.""" + return form.save(commit=commit) + + def delete_existing(self, obj, commit=True): + """Deletes an existing model instance.""" + if commit: + obj.delete() + + def save(self, commit=True): + """ + Save model instances for every form, adding and changing instances + as necessary, and return the list of instances. 
+ """ + if not commit: + self.saved_forms = [] + + def save_m2m(): + for form in self.saved_forms: + form.save_m2m() + + self.save_m2m = save_m2m + if self.edit_only: + return self.save_existing_objects(commit) + else: + return self.save_existing_objects(commit) + self.save_new_objects(commit) + + save.alters_data = True + + def clean(self): + self.validate_unique() + + def validate_unique(self): + # Collect unique_checks and date_checks to run from all the forms. + all_unique_checks = set() + all_date_checks = set() + forms_to_delete = self.deleted_forms + valid_forms = [ + form + for form in self.forms + if form.is_valid() and form not in forms_to_delete + ] + for form in valid_forms: + exclude = form._get_validation_exclusions() + unique_checks, date_checks = form.instance._get_unique_checks( + exclude=exclude, + include_meta_constraints=True, + ) + all_unique_checks.update(unique_checks) + all_date_checks.update(date_checks) + + errors = [] + # Do each of the unique checks (unique and unique_together) + for uclass, unique_check in all_unique_checks: + seen_data = set() + for form in valid_forms: + # Get the data for the set of fields that must be unique among + # the forms. + row_data = ( + field if field in self.unique_fields else form.cleaned_data[field] + for field in unique_check + if field in form.cleaned_data + ) + # Reduce Model instances to their primary key values + row_data = tuple( + d._get_pk_val() if hasattr(d, "_get_pk_val") + # Prevent "unhashable type: list" errors later on. 
+ else tuple(d) if isinstance(d, list) else d + for d in row_data + ) + if row_data and None not in row_data: + # if we've already seen it then we have a uniqueness failure + if row_data in seen_data: + # poke error messages into the right places and mark + # the form as invalid + errors.append(self.get_unique_error_message(unique_check)) + form._errors[NON_FIELD_ERRORS] = self.error_class( + [self.get_form_error()], + renderer=self.renderer, + ) + # Remove the data from the cleaned_data dict since it + # was invalid. + for field in unique_check: + if field in form.cleaned_data: + del form.cleaned_data[field] + # mark the data as seen + seen_data.add(row_data) + # iterate over each of the date checks now + for date_check in all_date_checks: + seen_data = set() + uclass, lookup, field, unique_for = date_check + for form in valid_forms: + # see if we have data for both fields + if ( + form.cleaned_data + and form.cleaned_data[field] is not None + and form.cleaned_data[unique_for] is not None + ): + # if it's a date lookup we need to get the data for all the fields + if lookup == "date": + date = form.cleaned_data[unique_for] + date_data = (date.year, date.month, date.day) + # otherwise it's just the attribute on the date/datetime + # object + else: + date_data = (getattr(form.cleaned_data[unique_for], lookup),) + data = (form.cleaned_data[field],) + date_data + # if we've already seen it then we have a uniqueness failure + if data in seen_data: + # poke error messages into the right places and mark + # the form as invalid + errors.append(self.get_date_error_message(date_check)) + form._errors[NON_FIELD_ERRORS] = self.error_class( + [self.get_form_error()], + renderer=self.renderer, + ) + # Remove the data from the cleaned_data dict since it + # was invalid. 
+ del form.cleaned_data[field] + # mark the data as seen + seen_data.add(data) + + if errors: + raise ValidationError(errors) + + def get_unique_error_message(self, unique_check): + if len(unique_check) == 1: + return gettext("Please correct the duplicate data for %(field)s.") % { + "field": unique_check[0], + } + else: + return gettext( + "Please correct the duplicate data for %(field)s, which must be unique." + ) % { + "field": get_text_list(unique_check, _("and")), + } + + def get_date_error_message(self, date_check): + return gettext( + "Please correct the duplicate data for %(field_name)s " + "which must be unique for the %(lookup)s in %(date_field)s." + ) % { + "field_name": date_check[2], + "date_field": date_check[3], + "lookup": str(date_check[1]), + } + + def get_form_error(self): + return gettext("Please correct the duplicate values below.") + + def save_existing_objects(self, commit=True): + self.changed_objects = [] + self.deleted_objects = [] + if not self.initial_forms: + return [] + + saved_instances = [] + forms_to_delete = self.deleted_forms + for form in self.initial_forms: + obj = form.instance + # If the pk is None, it means either: + # 1. The object is an unexpected empty model, created by invalid + # POST data such as an object outside the formset's queryset. + # 2. The object was already deleted from the database. + if obj.pk is None: + continue + if form in forms_to_delete: + self.deleted_objects.append(obj) + self.delete_existing(obj, commit=commit) + elif form.has_changed(): + self.changed_objects.append((obj, form.changed_data)) + saved_instances.append(self.save_existing(form, obj, commit=commit)) + if not commit: + self.saved_forms.append(form) + return saved_instances + + def save_new_objects(self, commit=True): + self.new_objects = [] + for form in self.extra_forms: + if not form.has_changed(): + continue + # If someone has marked an add form for deletion, don't save the + # object. 
+ if self.can_delete and self._should_delete_form(form): + continue + self.new_objects.append(self.save_new(form, commit=commit)) + if not commit: + self.saved_forms.append(form) + return self.new_objects + + def add_fields(self, form, index): + """Add a hidden field for the object's primary key.""" + from django.db.models import AutoField, ForeignKey, OneToOneField + + self._pk_field = pk = self.model._meta.pk + # If a pk isn't editable, then it won't be on the form, so we need to + # add it here so we can tell which object is which when we get the + # data back. Generally, pk.editable should be false, but for some + # reason, auto_created pk fields and AutoField's editable attribute is + # True, so check for that as well. + + def pk_is_not_editable(pk): + return ( + (not pk.editable) + or (pk.auto_created or isinstance(pk, AutoField)) + or ( + pk.remote_field + and pk.remote_field.parent_link + and pk_is_not_editable(pk.remote_field.model._meta.pk) + ) + ) + + if pk_is_not_editable(pk) or pk.name not in form.fields: + if form.is_bound: + # If we're adding the related instance, ignore its primary key + # as it could be an auto-generated default which isn't actually + # in the database. 
+ pk_value = None if form.instance._state.adding else form.instance.pk + else: + try: + if index is not None: + pk_value = self.get_queryset()[index].pk + else: + pk_value = None + except IndexError: + pk_value = None + if isinstance(pk, (ForeignKey, OneToOneField)): + qs = pk.remote_field.model._default_manager.get_queryset() + else: + qs = self.model._default_manager.get_queryset() + qs = qs.using(form.instance._state.db) + if form._meta.widgets: + widget = form._meta.widgets.get(self._pk_field.name, HiddenInput) + else: + widget = HiddenInput + form.fields[self._pk_field.name] = ModelChoiceField( + qs, initial=pk_value, required=False, widget=widget + ) + super().add_fields(form, index) + + +def modelformset_factory( + model, + form=ModelForm, + formfield_callback=None, + formset=BaseModelFormSet, + extra=1, + can_delete=False, + can_order=False, + max_num=None, + fields=None, + exclude=None, + widgets=None, + validate_max=False, + localized_fields=None, + labels=None, + help_texts=None, + error_messages=None, + min_num=None, + validate_min=False, + field_classes=None, + absolute_max=None, + can_delete_extra=True, + renderer=None, + edit_only=False, +): + """Return a FormSet class for the given Django model class.""" + meta = getattr(form, "Meta", None) + if ( + getattr(meta, "fields", fields) is None + and getattr(meta, "exclude", exclude) is None + ): + raise ImproperlyConfigured( + "Calling modelformset_factory without defining 'fields' or " + "'exclude' explicitly is prohibited." 
+ ) + + form = modelform_factory( + model, + form=form, + fields=fields, + exclude=exclude, + formfield_callback=formfield_callback, + widgets=widgets, + localized_fields=localized_fields, + labels=labels, + help_texts=help_texts, + error_messages=error_messages, + field_classes=field_classes, + ) + FormSet = formset_factory( + form, + formset, + extra=extra, + min_num=min_num, + max_num=max_num, + can_order=can_order, + can_delete=can_delete, + validate_min=validate_min, + validate_max=validate_max, + absolute_max=absolute_max, + can_delete_extra=can_delete_extra, + renderer=renderer, + ) + FormSet.model = model + FormSet.edit_only = edit_only + return FormSet + + +# InlineFormSets ############################################################# + + +class BaseInlineFormSet(BaseModelFormSet): + """A formset for child objects related to a parent.""" + + def __init__( + self, + data=None, + files=None, + instance=None, + save_as_new=False, + prefix=None, + queryset=None, + **kwargs, + ): + if instance is None: + self.instance = self.fk.remote_field.model() + else: + self.instance = instance + self.save_as_new = save_as_new + if queryset is None: + queryset = self.model._default_manager + if self.instance.pk is not None: + qs = queryset.filter(**{self.fk.name: self.instance}) + else: + qs = queryset.none() + self.unique_fields = {self.fk.name} + super().__init__(data, files, prefix=prefix, queryset=qs, **kwargs) + + # Add the generated field to form._meta.fields if it's defined to make + # sure validation isn't skipped on that field. 
+ if self.form._meta.fields and self.fk.name not in self.form._meta.fields: + if isinstance(self.form._meta.fields, tuple): + self.form._meta.fields = list(self.form._meta.fields) + self.form._meta.fields.append(self.fk.name) + + def initial_form_count(self): + if self.save_as_new: + return 0 + return super().initial_form_count() + + def _construct_form(self, i, **kwargs): + form = super()._construct_form(i, **kwargs) + if self.save_as_new: + mutable = getattr(form.data, "_mutable", None) + # Allow modifying an immutable QueryDict. + if mutable is not None: + form.data._mutable = True + # Remove the primary key from the form's data, we are only + # creating new instances + form.data[form.add_prefix(self._pk_field.name)] = None + # Remove the foreign key from the form's data + form.data[form.add_prefix(self.fk.name)] = None + if mutable is not None: + form.data._mutable = mutable + + # Set the fk value here so that the form can do its validation. + fk_value = self.instance.pk + if self.fk.remote_field.field_name != self.fk.remote_field.model._meta.pk.name: + fk_value = getattr(self.instance, self.fk.remote_field.field_name) + fk_value = getattr(fk_value, "pk", fk_value) + setattr(form.instance, self.fk.get_attname(), fk_value) + return form + + @classmethod + def get_default_prefix(cls): + return cls.fk.remote_field.get_accessor_name(model=cls.model).replace("+", "") + + def save_new(self, form, commit=True): + # Ensure the latest copy of the related instance is present on each + # form (it may have been saved after the formset was originally + # instantiated). 
+ setattr(form.instance, self.fk.name, self.instance) + return super().save_new(form, commit=commit) + + def add_fields(self, form, index): + super().add_fields(form, index) + if self._pk_field == self.fk: + name = self._pk_field.name + kwargs = {"pk_field": True} + else: + # The foreign key field might not be on the form, so we poke at the + # Model field to get the label, since we need that for error messages. + name = self.fk.name + kwargs = { + "label": getattr( + form.fields.get(name), "label", capfirst(self.fk.verbose_name) + ) + } + + # The InlineForeignKeyField assumes that the foreign key relation is + # based on the parent model's pk. If this isn't the case, set to_field + # to correctly resolve the initial form value. + if self.fk.remote_field.field_name != self.fk.remote_field.model._meta.pk.name: + kwargs["to_field"] = self.fk.remote_field.field_name + + # If we're adding a new object, ignore a parent's auto-generated key + # as it will be regenerated on the save request. + if self.instance._state.adding: + if kwargs.get("to_field") is not None: + to_field = self.instance._meta.get_field(kwargs["to_field"]) + else: + to_field = self.instance._meta.pk + + if to_field.has_default() and ( + # Don't ignore a parent's auto-generated key if it's not the + # parent model's pk and form data is provided. + to_field.attname == self.fk.remote_field.model._meta.pk.name + or not form.data + ): + setattr(self.instance, to_field.attname, None) + + form.fields[name] = InlineForeignKeyField(self.instance, **kwargs) + + def get_unique_error_message(self, unique_check): + unique_check = [field for field in unique_check if field != self.fk.name] + return super().get_unique_error_message(unique_check) + + +def _get_foreign_key(parent_model, model, fk_name=None, can_fail=False): + """ + Find and return the ForeignKey from model to parent if there is one + (return None if can_fail is True and no such field exists). 
If fk_name is + provided, assume it is the name of the ForeignKey field. Unless can_fail is + True, raise an exception if there isn't a ForeignKey from model to + parent_model. + """ + # avoid circular import + from django.db.models import ForeignKey + + opts = model._meta + if fk_name: + fks_to_parent = [f for f in opts.fields if f.name == fk_name] + if len(fks_to_parent) == 1: + fk = fks_to_parent[0] + parent_list = parent_model._meta.get_parent_list() + if ( + not isinstance(fk, ForeignKey) + or ( + # ForeignKey to proxy models. + fk.remote_field.model._meta.proxy + and fk.remote_field.model._meta.proxy_for_model not in parent_list + ) + or ( + # ForeignKey to concrete models. + not fk.remote_field.model._meta.proxy + and fk.remote_field.model != parent_model + and fk.remote_field.model not in parent_list + ) + ): + raise ValueError( + "fk_name '%s' is not a ForeignKey to '%s'." + % (fk_name, parent_model._meta.label) + ) + elif not fks_to_parent: + raise ValueError( + "'%s' has no field named '%s'." % (model._meta.label, fk_name) + ) + else: + # Try to discover what the ForeignKey from model to parent_model is + parent_list = parent_model._meta.get_parent_list() + fks_to_parent = [ + f + for f in opts.fields + if isinstance(f, ForeignKey) + and ( + f.remote_field.model == parent_model + or f.remote_field.model in parent_list + or ( + f.remote_field.model._meta.proxy + and f.remote_field.model._meta.proxy_for_model in parent_list + ) + ) + ] + if len(fks_to_parent) == 1: + fk = fks_to_parent[0] + elif not fks_to_parent: + if can_fail: + return + raise ValueError( + "'%s' has no ForeignKey to '%s'." + % ( + model._meta.label, + parent_model._meta.label, + ) + ) + else: + raise ValueError( + "'%s' has more than one ForeignKey to '%s'. You must specify " + "a 'fk_name' attribute." 
+ % ( + model._meta.label, + parent_model._meta.label, + ) + ) + return fk + + +def inlineformset_factory( + parent_model, + model, + form=ModelForm, + formset=BaseInlineFormSet, + fk_name=None, + fields=None, + exclude=None, + extra=3, + can_order=False, + can_delete=True, + max_num=None, + formfield_callback=None, + widgets=None, + validate_max=False, + localized_fields=None, + labels=None, + help_texts=None, + error_messages=None, + min_num=None, + validate_min=False, + field_classes=None, + absolute_max=None, + can_delete_extra=True, + renderer=None, + edit_only=False, +): + """ + Return an ``InlineFormSet`` for the given kwargs. + + ``fk_name`` must be provided if ``model`` has more than one ``ForeignKey`` + to ``parent_model``. + """ + fk = _get_foreign_key(parent_model, model, fk_name=fk_name) + # enforce a max_num=1 when the foreign key to the parent model is unique. + if fk.unique: + max_num = 1 + kwargs = { + "form": form, + "formfield_callback": formfield_callback, + "formset": formset, + "extra": extra, + "can_delete": can_delete, + "can_order": can_order, + "fields": fields, + "exclude": exclude, + "min_num": min_num, + "max_num": max_num, + "widgets": widgets, + "validate_min": validate_min, + "validate_max": validate_max, + "localized_fields": localized_fields, + "labels": labels, + "help_texts": help_texts, + "error_messages": error_messages, + "field_classes": field_classes, + "absolute_max": absolute_max, + "can_delete_extra": can_delete_extra, + "renderer": renderer, + "edit_only": edit_only, + } + FormSet = modelformset_factory(model, **kwargs) + FormSet.fk = fk + return FormSet + + +# Fields ##################################################################### + + +class InlineForeignKeyField(Field): + """ + A basic integer field that deals with validating the given value to a + given parent instance in an inline. 
+ """ + + widget = HiddenInput + default_error_messages = { + "invalid_choice": _("The inline value did not match the parent instance."), + } + + def __init__(self, parent_instance, *args, pk_field=False, to_field=None, **kwargs): + self.parent_instance = parent_instance + self.pk_field = pk_field + self.to_field = to_field + if self.parent_instance is not None: + if self.to_field: + kwargs["initial"] = getattr(self.parent_instance, self.to_field) + else: + kwargs["initial"] = self.parent_instance.pk + kwargs["required"] = False + super().__init__(*args, **kwargs) + + def clean(self, value): + if value in self.empty_values: + if self.pk_field: + return None + # if there is no value act as we did before. + return self.parent_instance + # ensure the we compare the values as equal types. + if self.to_field: + orig = getattr(self.parent_instance, self.to_field) + else: + orig = self.parent_instance.pk + if str(value) != str(orig): + raise ValidationError( + self.error_messages["invalid_choice"], code="invalid_choice" + ) + return self.parent_instance + + def has_changed(self, initial, data): + return False + + +class ModelChoiceIteratorValue: + def __init__(self, value, instance): + self.value = value + self.instance = instance + + def __str__(self): + return str(self.value) + + def __hash__(self): + return hash(self.value) + + def __eq__(self, other): + if isinstance(other, ModelChoiceIteratorValue): + other = other.value + return self.value == other + + +class ModelChoiceIterator: + def __init__(self, field): + self.field = field + self.queryset = field.queryset + + def __iter__(self): + if self.field.empty_label is not None: + yield ("", self.field.empty_label) + queryset = self.queryset + # Can't use iterator() when queryset uses prefetch_related() + if not queryset._prefetch_related_lookups: + queryset = queryset.iterator() + for obj in queryset: + yield self.choice(obj) + + def __len__(self): + # count() adds a query but uses less memory since the QuerySet 
results + # won't be cached. In most cases, the choices will only be iterated on, + # and __len__() won't be called. + return self.queryset.count() + (1 if self.field.empty_label is not None else 0) + + def __bool__(self): + return self.field.empty_label is not None or self.queryset.exists() + + def choice(self, obj): + return ( + ModelChoiceIteratorValue(self.field.prepare_value(obj), obj), + self.field.label_from_instance(obj), + ) + + +class ModelChoiceField(ChoiceField): + """A ChoiceField whose choices are a model QuerySet.""" + + # This class is a subclass of ChoiceField for purity, but it doesn't + # actually use any of ChoiceField's implementation. + default_error_messages = { + "invalid_choice": _( + "Select a valid choice. That choice is not one of the available choices." + ), + } + iterator = ModelChoiceIterator + + def __init__( + self, + queryset, + *, + empty_label="---------", + required=True, + widget=None, + label=None, + initial=None, + help_text="", + to_field_name=None, + limit_choices_to=None, + blank=False, + **kwargs, + ): + # Call Field instead of ChoiceField __init__() because we don't need + # ChoiceField.__init__(). + Field.__init__( + self, + required=required, + widget=widget, + label=label, + initial=initial, + help_text=help_text, + **kwargs, + ) + if (required and initial is not None) or ( + isinstance(self.widget, RadioSelect) and not blank + ): + self.empty_label = None + else: + self.empty_label = empty_label + self.queryset = queryset + self.limit_choices_to = limit_choices_to # limit the queryset later. + self.to_field_name = to_field_name + + def get_limit_choices_to(self): + """ + Return ``limit_choices_to`` for this form field. + + If it is a callable, invoke it and return the result. 
+ """ + if callable(self.limit_choices_to): + return self.limit_choices_to() + return self.limit_choices_to + + def __deepcopy__(self, memo): + result = super(ChoiceField, self).__deepcopy__(memo) + # Need to force a new ModelChoiceIterator to be created, bug #11183 + if self.queryset is not None: + result.queryset = self.queryset.all() + return result + + def _get_queryset(self): + return self._queryset + + def _set_queryset(self, queryset): + self._queryset = None if queryset is None else queryset.all() + self.widget.choices = self.choices + + queryset = property(_get_queryset, _set_queryset) + + # this method will be used to create object labels by the QuerySetIterator. + # Override it to customize the label. + def label_from_instance(self, obj): + """ + Convert objects into strings and generate the labels for the choices + presented by this object. Subclasses can override this method to + customize the display of the choices. + """ + return str(obj) + + def _get_choices(self): + # If self._choices is set, then somebody must have manually set + # the property self.choices. In this case, just return self._choices. + if hasattr(self, "_choices"): + return self._choices + + # Otherwise, execute the QuerySet in self.queryset to determine the + # choices dynamically. Return a fresh ModelChoiceIterator that has not been + # consumed. Note that we're instantiating a new ModelChoiceIterator *each* + # time _get_choices() is called (and, thus, each time self.choices is + # accessed) so that we can ensure the QuerySet has not been consumed. This + # construct might look complicated but it allows for lazy evaluation of + # the queryset. 
+ return self.iterator(self) + + choices = property(_get_choices, ChoiceField._set_choices) + + def prepare_value(self, value): + if hasattr(value, "_meta"): + if self.to_field_name: + return value.serializable_value(self.to_field_name) + else: + return value.pk + return super().prepare_value(value) + + def to_python(self, value): + if value in self.empty_values: + return None + try: + key = self.to_field_name or "pk" + if isinstance(value, self.queryset.model): + value = getattr(value, key) + value = self.queryset.get(**{key: value}) + except (ValueError, TypeError, self.queryset.model.DoesNotExist): + raise ValidationError( + self.error_messages["invalid_choice"], + code="invalid_choice", + params={"value": value}, + ) + return value + + def validate(self, value): + return Field.validate(self, value) + + def has_changed(self, initial, data): + if self.disabled: + return False + initial_value = initial if initial is not None else "" + data_value = data if data is not None else "" + return str(self.prepare_value(initial_value)) != str(data_value) + + +class ModelMultipleChoiceField(ModelChoiceField): + """A MultipleChoiceField whose choices are a model QuerySet.""" + + widget = SelectMultiple + hidden_widget = MultipleHiddenInput + default_error_messages = { + "invalid_list": _("Enter a list of values."), + "invalid_choice": _( + "Select a valid choice. %(value)s is not one of the available choices." 
+ ), + "invalid_pk_value": _("“%(pk)s” is not a valid value."), + } + + def __init__(self, queryset, **kwargs): + super().__init__(queryset, empty_label=None, **kwargs) + + def to_python(self, value): + if not value: + return [] + return list(self._check_values(value)) + + def clean(self, value): + value = self.prepare_value(value) + if self.required and not value: + raise ValidationError(self.error_messages["required"], code="required") + elif not self.required and not value: + return self.queryset.none() + if not isinstance(value, (list, tuple)): + raise ValidationError( + self.error_messages["invalid_list"], + code="invalid_list", + ) + qs = self._check_values(value) + # Since this overrides the inherited ModelChoiceField.clean + # we run custom validators here + self.run_validators(value) + return qs + + def _check_values(self, value): + """ + Given a list of possible PK values, return a QuerySet of the + corresponding objects. Raise a ValidationError if a given value is + invalid (not a valid PK, not in the queryset, etc.) + """ + key = self.to_field_name or "pk" + # deduplicate given values to avoid creating many querysets or + # requiring the database backend deduplicate efficiently. 
+ try: + value = frozenset(value) + except TypeError: + # list of lists isn't hashable, for example + raise ValidationError( + self.error_messages["invalid_list"], + code="invalid_list", + ) + for pk in value: + try: + self.queryset.filter(**{key: pk}) + except (ValueError, TypeError): + raise ValidationError( + self.error_messages["invalid_pk_value"], + code="invalid_pk_value", + params={"pk": pk}, + ) + qs = self.queryset.filter(**{"%s__in" % key: value}) + pks = {str(getattr(o, key)) for o in qs} + for val in value: + if str(val) not in pks: + raise ValidationError( + self.error_messages["invalid_choice"], + code="invalid_choice", + params={"value": val}, + ) + return qs + + def prepare_value(self, value): + if ( + hasattr(value, "__iter__") + and not isinstance(value, str) + and not hasattr(value, "_meta") + ): + prepare_value = super().prepare_value + return [prepare_value(v) for v in value] + return super().prepare_value(value) + + def has_changed(self, initial, data): + if self.disabled: + return False + if initial is None: + initial = [] + if data is None: + data = [] + if len(initial) != len(data): + return True + initial_set = {str(value) for value in self.prepare_value(initial)} + data_set = {str(value) for value in data} + return data_set != initial_set + + +def modelform_defines_fields(form_class): + return hasattr(form_class, "_meta") and ( + form_class._meta.fields is not None or form_class._meta.exclude is not None + ) diff --git a/testbed/django__django/django/forms/renderers.py b/testbed/django__django/django/forms/renderers.py new file mode 100644 index 0000000000000000000000000000000000000000..970d05804ccb35f40cb7a6edf7c144eb4051beea --- /dev/null +++ b/testbed/django__django/django/forms/renderers.py @@ -0,0 +1,110 @@ +import functools +import warnings +from pathlib import Path + +from django.conf import settings +from django.template.backends.django import DjangoTemplates +from django.template.loader import get_template +from 
django.utils.deprecation import RemovedInDjango60Warning +from django.utils.functional import cached_property +from django.utils.module_loading import import_string + + +@functools.lru_cache +def get_default_renderer(): + renderer_class = import_string(settings.FORM_RENDERER) + return renderer_class() + + +class BaseRenderer: + form_template_name = "django/forms/div.html" + formset_template_name = "django/forms/formsets/div.html" + field_template_name = "django/forms/field.html" + + def get_template(self, template_name): + raise NotImplementedError("subclasses must implement get_template()") + + def render(self, template_name, context, request=None): + template = self.get_template(template_name) + return template.render(context, request=request).strip() + + +class EngineMixin: + def get_template(self, template_name): + return self.engine.get_template(template_name) + + @cached_property + def engine(self): + return self.backend( + { + "APP_DIRS": True, + "DIRS": [Path(__file__).parent / self.backend.app_dirname], + "NAME": "djangoforms", + "OPTIONS": {}, + } + ) + + +class DjangoTemplates(EngineMixin, BaseRenderer): + """ + Load Django templates from the built-in widget templates in + django/forms/templates and from apps' 'templates' directory. + """ + + backend = DjangoTemplates + + +class Jinja2(EngineMixin, BaseRenderer): + """ + Load Jinja2 templates from the built-in widget templates in + django/forms/jinja2 and from apps' 'jinja2' directory. + """ + + @cached_property + def backend(self): + from django.template.backends.jinja2 import Jinja2 + + return Jinja2 + + +# RemovedInDjango60Warning. +class DjangoDivFormRenderer(DjangoTemplates): + """ + Load Django templates from django/forms/templates and from apps' + 'templates' directory and use the 'div.html' template to render forms and + formsets. + """ + + def __init__(self, *args, **kwargs): + warnings.warn( + "The DjangoDivFormRenderer transitional form renderer is deprecated. 
Use " + "DjangoTemplates instead.", + RemovedInDjango60Warning, + ) + super().__init__(*args, **kwargs) + + +# RemovedInDjango60Warning. +class Jinja2DivFormRenderer(Jinja2): + """ + Load Jinja2 templates from the built-in widget templates in + django/forms/jinja2 and from apps' 'jinja2' directory. + """ + + def __init__(self, *args, **kwargs): + warnings.warn( + "The Jinja2DivFormRenderer transitional form renderer is deprecated. Use " + "Jinja2 instead.", + RemovedInDjango60Warning, + ) + super().__init__(*args, **kwargs) + + +class TemplatesSetting(BaseRenderer): + """ + Load templates using template.loader.get_template() which is configured + based on settings.TEMPLATES. + """ + + def get_template(self, template_name): + return get_template(template_name) diff --git a/testbed/django__django/django/forms/templates/django/forms/attrs.html b/testbed/django__django/django/forms/templates/django/forms/attrs.html new file mode 100644 index 0000000000000000000000000000000000000000..50de36bae0f6ceefba3c3770e99314626d87fc2f --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/attrs.html @@ -0,0 +1 @@ +{% for name, value in attrs.items %}{% if value is not False %} {{ name }}{% if value is not True %}="{{ value|stringformat:'s' }}"{% endif %}{% endif %}{% endfor %} \ No newline at end of file diff --git a/testbed/django__django/django/forms/templates/django/forms/div.html b/testbed/django__django/django/forms/templates/django/forms/div.html new file mode 100644 index 0000000000000000000000000000000000000000..c20eead4aadf2a8aa1055f1e28fce43f9a9101ba --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/div.html @@ -0,0 +1,15 @@ +{{ errors }} +{% if errors and not fields %} +
  <div>{% for field in hidden_fields %}{{ field }}{% endfor %}</div>
+{% endif %} +{% for field, errors in fields %} + + {{ field.as_field_group }} + {% if forloop.last %} + {% for field in hidden_fields %}{{ field }}{% endfor %} + {% endif %} + +{% endfor %} +{% if not fields and not errors %} + {% for field in hidden_fields %}{{ field }}{% endfor %} +{% endif %} diff --git a/testbed/django__django/django/forms/templates/django/forms/errors/dict/default.html b/testbed/django__django/django/forms/templates/django/forms/errors/dict/default.html new file mode 100644 index 0000000000000000000000000000000000000000..8a833c658ddc2f1e9a911de2d5087c64ea0165f0 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/errors/dict/default.html @@ -0,0 +1 @@ +{% include "django/forms/errors/dict/ul.html" %} \ No newline at end of file diff --git a/testbed/django__django/django/forms/templates/django/forms/errors/dict/text.txt b/testbed/django__django/django/forms/templates/django/forms/errors/dict/text.txt new file mode 100644 index 0000000000000000000000000000000000000000..dc9fd80c99ce0182ad391c4455d873f7ecc98f72 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/errors/dict/text.txt @@ -0,0 +1,3 @@ +{% for field, errors in errors %}* {{ field }} +{% for error in errors %} * {{ error }} +{% endfor %}{% endfor %} diff --git a/testbed/django__django/django/forms/templates/django/forms/errors/dict/ul.html b/testbed/django__django/django/forms/templates/django/forms/errors/dict/ul.html new file mode 100644 index 0000000000000000000000000000000000000000..c16fd6591450db3d6ae4b4d34ed82250a97c53b5 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/errors/dict/ul.html @@ -0,0 +1 @@ +{% if errors %}
<ul class="errorlist">{% for field, error in errors %}<li>{{ field }}{{ error }}</li>{% endfor %}</ul>
{% endif %} diff --git a/testbed/django__django/django/forms/templates/django/forms/errors/list/default.html b/testbed/django__django/django/forms/templates/django/forms/errors/list/default.html new file mode 100644 index 0000000000000000000000000000000000000000..b174f26f4f703b72f9f1cadb23f27fcc5efb9fb8 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/errors/list/default.html @@ -0,0 +1 @@ +{% include "django/forms/errors/list/ul.html" %} \ No newline at end of file diff --git a/testbed/django__django/django/forms/templates/django/forms/errors/list/text.txt b/testbed/django__django/django/forms/templates/django/forms/errors/list/text.txt new file mode 100644 index 0000000000000000000000000000000000000000..aa7f870b474e63f59b6df640fa9b1005f73c16c5 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/errors/list/text.txt @@ -0,0 +1,2 @@ +{% for error in errors %}* {{ error }} +{% endfor %} diff --git a/testbed/django__django/django/forms/templates/django/forms/errors/list/ul.html b/testbed/django__django/django/forms/templates/django/forms/errors/list/ul.html new file mode 100644 index 0000000000000000000000000000000000000000..57b34ccb88a38d891ce1ed54feaa1d04c02fdf3f --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/errors/list/ul.html @@ -0,0 +1 @@ +{% if errors %}
<ul class="errorlist{% if error_class %} {{ error_class }}{% endif %}">{% for error in errors %}<li>{{ error }}</li>{% endfor %}</ul>
{% endif %} \ No newline at end of file diff --git a/testbed/django__django/django/forms/templates/django/forms/field.html b/testbed/django__django/django/forms/templates/django/forms/field.html new file mode 100644 index 0000000000000000000000000000000000000000..72fb357b71c3579042cdad158c94abe94f1ab5b0 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/field.html @@ -0,0 +1,10 @@ +{% if field.use_fieldset %} +
+ {% if field.label %}{{ field.legend_tag }}{% endif %} +{% else %} + {% if field.label %}{{ field.label_tag }}{% endif %} +{% endif %} +{% if field.help_text %}
{{ field.help_text|safe }}
{% endif %} +{{ field.errors }} +{{ field }} +{% if field.use_fieldset %}
{% endif %} diff --git a/testbed/django__django/django/forms/templates/django/forms/formsets/div.html b/testbed/django__django/django/forms/templates/django/forms/formsets/div.html new file mode 100644 index 0000000000000000000000000000000000000000..93499897d409f803af1e6f285c6b70f068153301 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/formsets/div.html @@ -0,0 +1 @@ +{{ formset.management_form }}{% for form in formset %}{{ form.as_div }}{% endfor %} diff --git a/testbed/django__django/django/forms/templates/django/forms/formsets/p.html b/testbed/django__django/django/forms/templates/django/forms/formsets/p.html new file mode 100644 index 0000000000000000000000000000000000000000..00c2df6b3ee9b677eb96f42b41c404aeec979f0d --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/formsets/p.html @@ -0,0 +1 @@ +{{ formset.management_form }}{% for form in formset %}{{ form.as_p }}{% endfor %} diff --git a/testbed/django__django/django/forms/templates/django/forms/formsets/table.html b/testbed/django__django/django/forms/templates/django/forms/formsets/table.html new file mode 100644 index 0000000000000000000000000000000000000000..4fa5e425480b38aa59140c34db2bb40d004062b0 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/formsets/table.html @@ -0,0 +1 @@ +{{ formset.management_form }}{% for form in formset %}{{ form.as_table }}{% endfor %} diff --git a/testbed/django__django/django/forms/templates/django/forms/formsets/ul.html b/testbed/django__django/django/forms/templates/django/forms/formsets/ul.html new file mode 100644 index 0000000000000000000000000000000000000000..272e1290ee8072ed1f4fe849428b2b9042c60a2f --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/formsets/ul.html @@ -0,0 +1 @@ +{{ formset.management_form }}{% for form in formset %}{{ form.as_ul }}{% endfor %} diff --git a/testbed/django__django/django/forms/templates/django/forms/label.html 
b/testbed/django__django/django/forms/templates/django/forms/label.html new file mode 100644 index 0000000000000000000000000000000000000000..0eba630e82ae0b07ce8d6c4d5debe0854422faf1 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/label.html @@ -0,0 +1 @@ +{% if use_tag %}<{{ tag }}{% include 'django/forms/attrs.html' %}>{{ label }}{% else %}{{ label }}{% endif %} diff --git a/testbed/django__django/django/forms/templates/django/forms/p.html b/testbed/django__django/django/forms/templates/django/forms/p.html new file mode 100644 index 0000000000000000000000000000000000000000..829c42eca6ad35dacc8cfe2df6fafc27382904f9 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/p.html @@ -0,0 +1,20 @@ +{{ errors }} +{% if errors and not fields %} +

{% for field in hidden_fields %}{{ field }}{% endfor %}

+{% endif %} +{% for field, errors in fields %} + {{ errors }} + + {% if field.label %}{{ field.label_tag }}{% endif %} + {{ field }} + {% if field.help_text %} + {{ field.help_text|safe }} + {% endif %} + {% if forloop.last %} + {% for field in hidden_fields %}{{ field }}{% endfor %} + {% endif %} +

+{% endfor %} +{% if not fields and not errors %} + {% for field in hidden_fields %}{{ field }}{% endfor %} +{% endif %} diff --git a/testbed/django__django/django/forms/templates/django/forms/table.html b/testbed/django__django/django/forms/templates/django/forms/table.html new file mode 100644 index 0000000000000000000000000000000000000000..5d41bc440260e1088d62c6366d1e0629ffbc772a --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/table.html @@ -0,0 +1,29 @@ +{% if errors %} + + + {{ errors }} + {% if not fields %} + {% for field in hidden_fields %}{{ field }}{% endfor %} + {% endif %} + + +{% endif %} +{% for field, errors in fields %} + + {% if field.label %}{{ field.label_tag }}{% endif %} + + {{ errors }} + {{ field }} + {% if field.help_text %} +
+ {{ field.help_text|safe }} + {% endif %} + {% if forloop.last %} + {% for field in hidden_fields %}{{ field }}{% endfor %} + {% endif %} + + +{% endfor %} +{% if not fields and not errors %} + {% for field in hidden_fields %}{{ field }}{% endfor %} +{% endif %} diff --git a/testbed/django__django/django/forms/templates/django/forms/ul.html b/testbed/django__django/django/forms/templates/django/forms/ul.html new file mode 100644 index 0000000000000000000000000000000000000000..7383b96235707863f86063d3f738469f06ae0c88 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/ul.html @@ -0,0 +1,24 @@ +{% if errors %} +
  • + {{ errors }} + {% if not fields %} + {% for field in hidden_fields %}{{ field }}{% endfor %} + {% endif %} +
  • +{% endif %} +{% for field, errors in fields %} + + {{ errors }} + {% if field.label %}{{ field.label_tag }}{% endif %} + {{ field }} + {% if field.help_text %} + {{ field.help_text|safe }} + {% endif %} + {% if forloop.last %} + {% for field in hidden_fields %}{{ field }}{% endfor %} + {% endif %} + +{% endfor %} +{% if not fields and not errors %} + {% for field in hidden_fields %}{{ field }}{% endfor %} +{% endif %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/attrs.html b/testbed/django__django/django/forms/templates/django/forms/widgets/attrs.html new file mode 100644 index 0000000000000000000000000000000000000000..7a5592afcb22349b9e2dee7434a7e899dc2fde11 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/attrs.html @@ -0,0 +1 @@ +{% for name, value in widget.attrs.items %}{% if value is not False %} {{ name }}{% if value is not True %}="{{ value|stringformat:'s' }}"{% endif %}{% endif %}{% endfor %} \ No newline at end of file diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/checkbox.html b/testbed/django__django/django/forms/templates/django/forms/widgets/checkbox.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/checkbox.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/checkbox_option.html b/testbed/django__django/django/forms/templates/django/forms/widgets/checkbox_option.html new file mode 100644 index 0000000000000000000000000000000000000000..bb9acbafd9730d939ca3fddc60d551662d4f635c --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/checkbox_option.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input_option.html" %} diff --git 
a/testbed/django__django/django/forms/templates/django/forms/widgets/checkbox_select.html b/testbed/django__django/django/forms/templates/django/forms/widgets/checkbox_select.html new file mode 100644 index 0000000000000000000000000000000000000000..780899af446da6c3de2150e81aeede139afc8ed9 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/checkbox_select.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/multiple_input.html" %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/clearable_file_input.html b/testbed/django__django/django/forms/templates/django/forms/widgets/clearable_file_input.html new file mode 100644 index 0000000000000000000000000000000000000000..93812837f010b1529667d2839a23b25b30011d96 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/clearable_file_input.html @@ -0,0 +1,5 @@ +{% if widget.is_initial %}{{ widget.initial_text }}: {{ widget.value }}{% if not widget.required %} + +{% endif %}
    +{{ widget.input_text }}:{% endif %} + diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/date.html b/testbed/django__django/django/forms/templates/django/forms/widgets/date.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/date.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/datetime.html b/testbed/django__django/django/forms/templates/django/forms/widgets/datetime.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/datetime.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/email.html b/testbed/django__django/django/forms/templates/django/forms/widgets/email.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/email.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/file.html b/testbed/django__django/django/forms/templates/django/forms/widgets/file.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/file.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/hidden.html b/testbed/django__django/django/forms/templates/django/forms/widgets/hidden.html new file 
mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/hidden.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/input.html b/testbed/django__django/django/forms/templates/django/forms/widgets/input.html new file mode 100644 index 0000000000000000000000000000000000000000..9010a92145063e89d8b85191fa455d78a75d5a49 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/input.html @@ -0,0 +1 @@ + diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/input_option.html b/testbed/django__django/django/forms/templates/django/forms/widgets/input_option.html new file mode 100644 index 0000000000000000000000000000000000000000..48cd65b93af413f3540535a0a7fe425bd184dc94 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/input_option.html @@ -0,0 +1 @@ +{% if widget.wrap_label %}{% endif %}{% include "django/forms/widgets/input.html" %}{% if widget.wrap_label %} {{ widget.label }}{% endif %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/multiple_hidden.html b/testbed/django__django/django/forms/templates/django/forms/widgets/multiple_hidden.html new file mode 100644 index 0000000000000000000000000000000000000000..b9695deb0220ddea6a194282c3824259a94605f8 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/multiple_hidden.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/multiwidget.html" %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/multiple_input.html b/testbed/django__django/django/forms/templates/django/forms/widgets/multiple_input.html new file mode 100644 index 0000000000000000000000000000000000000000..2a0fec6ecc1f9d5db9717ea1c463212a3b16c981 --- 
/dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/multiple_input.html @@ -0,0 +1,5 @@ +{% with id=widget.attrs.id %}{% for group, options, index in widget.optgroups %}{% if group %} +
    {% endif %}{% for option in options %}
    + {% include option.template_name with widget=option %}
    {% endfor %}{% if group %} +
    {% endif %}{% endfor %} +{% endwith %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/multiwidget.html b/testbed/django__django/django/forms/templates/django/forms/widgets/multiwidget.html new file mode 100644 index 0000000000000000000000000000000000000000..7e687a136bd9634abbb633ef9f59bff20d4a0640 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/multiwidget.html @@ -0,0 +1 @@ +{% spaceless %}{% for widget in widget.subwidgets %}{% include widget.template_name %}{% endfor %}{% endspaceless %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/number.html b/testbed/django__django/django/forms/templates/django/forms/widgets/number.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/number.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/password.html b/testbed/django__django/django/forms/templates/django/forms/widgets/password.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/password.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/radio.html b/testbed/django__django/django/forms/templates/django/forms/widgets/radio.html new file mode 100644 index 0000000000000000000000000000000000000000..780899af446da6c3de2150e81aeede139afc8ed9 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/radio.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/multiple_input.html" %} diff --git 
a/testbed/django__django/django/forms/templates/django/forms/widgets/radio_option.html b/testbed/django__django/django/forms/templates/django/forms/widgets/radio_option.html new file mode 100644 index 0000000000000000000000000000000000000000..bb9acbafd9730d939ca3fddc60d551662d4f635c --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/radio_option.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input_option.html" %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/select.html b/testbed/django__django/django/forms/templates/django/forms/widgets/select.html new file mode 100644 index 0000000000000000000000000000000000000000..4d1f6b057b7f9f46280928a575ad091f30b7d85e --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/select.html @@ -0,0 +1,5 @@ + diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/select_date.html b/testbed/django__django/django/forms/templates/django/forms/widgets/select_date.html new file mode 100644 index 0000000000000000000000000000000000000000..32fda82609f8b44d7199901b51d707899b93cdd8 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/select_date.html @@ -0,0 +1 @@ +{% include 'django/forms/widgets/multiwidget.html' %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/select_option.html b/testbed/django__django/django/forms/templates/django/forms/widgets/select_option.html new file mode 100644 index 0000000000000000000000000000000000000000..8d31961dd336bb2e9f8d9b204d6c8b3280d4253a --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/select_option.html @@ -0,0 +1 @@ + diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/splitdatetime.html b/testbed/django__django/django/forms/templates/django/forms/widgets/splitdatetime.html new file mode 100644 index 
0000000000000000000000000000000000000000..32fda82609f8b44d7199901b51d707899b93cdd8 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/splitdatetime.html @@ -0,0 +1 @@ +{% include 'django/forms/widgets/multiwidget.html' %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/splithiddendatetime.html b/testbed/django__django/django/forms/templates/django/forms/widgets/splithiddendatetime.html new file mode 100644 index 0000000000000000000000000000000000000000..32fda82609f8b44d7199901b51d707899b93cdd8 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/splithiddendatetime.html @@ -0,0 +1 @@ +{% include 'django/forms/widgets/multiwidget.html' %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/text.html b/testbed/django__django/django/forms/templates/django/forms/widgets/text.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/text.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/textarea.html b/testbed/django__django/django/forms/templates/django/forms/widgets/textarea.html new file mode 100644 index 0000000000000000000000000000000000000000..b86766c89496fa0e82e88349e9bad0fefd5768f3 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/textarea.html @@ -0,0 +1,2 @@ + diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/time.html b/testbed/django__django/django/forms/templates/django/forms/widgets/time.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/time.html @@ -0,0 +1 @@ +{% include 
"django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/templates/django/forms/widgets/url.html b/testbed/django__django/django/forms/templates/django/forms/widgets/url.html new file mode 100644 index 0000000000000000000000000000000000000000..08b1e61c0b0d4aa528c173de4683ecf1e19b0053 --- /dev/null +++ b/testbed/django__django/django/forms/templates/django/forms/widgets/url.html @@ -0,0 +1 @@ +{% include "django/forms/widgets/input.html" %} diff --git a/testbed/django__django/django/forms/utils.py b/testbed/django__django/django/forms/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..f4fbf3e2418a23ff1933766ccdd669986237ef58 --- /dev/null +++ b/testbed/django__django/django/forms/utils.py @@ -0,0 +1,244 @@ +import json +from collections import UserList + +from django.conf import settings +from django.core.exceptions import ValidationError +from django.forms.renderers import get_default_renderer +from django.utils import timezone +from django.utils.html import escape, format_html_join +from django.utils.safestring import mark_safe +from django.utils.translation import gettext_lazy as _ + + +def pretty_name(name): + """Convert 'first_name' to 'First name'.""" + if not name: + return "" + return name.replace("_", " ").capitalize() + + +def flatatt(attrs): + """ + Convert a dictionary of attributes to a single string. + The returned string will contain a leading space followed by key="value", + XML-style pairs. In the case of a boolean value, the key will appear + without a value. It is assumed that the keys do not need to be + XML-escaped. If the passed dictionary is empty, then return an empty + string. + + The result is passed through 'mark_safe' (by way of 'format_html_join'). 
+ """ + key_value_attrs = [] + boolean_attrs = [] + for attr, value in attrs.items(): + if isinstance(value, bool): + if value: + boolean_attrs.append((attr,)) + elif value is not None: + key_value_attrs.append((attr, value)) + + return format_html_join("", ' {}="{}"', sorted(key_value_attrs)) + format_html_join( + "", " {}", sorted(boolean_attrs) + ) + + +class RenderableMixin: + def get_context(self): + raise NotImplementedError( + "Subclasses of RenderableMixin must provide a get_context() method." + ) + + def render(self, template_name=None, context=None, renderer=None): + renderer = renderer or self.renderer + template = template_name or self.template_name + context = context or self.get_context() + return mark_safe(renderer.render(template, context)) + + __str__ = render + __html__ = render + + +class RenderableFieldMixin(RenderableMixin): + def as_field_group(self): + return self.render() + + def as_hidden(self): + raise NotImplementedError( + "Subclasses of RenderableFieldMixin must provide an as_hidden() method." + ) + + def as_widget(self): + raise NotImplementedError( + "Subclasses of RenderableFieldMixin must provide an as_widget() method." + ) + + def __str__(self): + """Render this field as an HTML widget.""" + if self.field.show_hidden_initial: + return self.as_widget() + self.as_hidden(only_initial=True) + return self.as_widget() + + __html__ = __str__ + + +class RenderableFormMixin(RenderableMixin): + def as_p(self): + """Render as

    elements.""" + return self.render(self.template_name_p) + + def as_table(self): + """Render as elements excluding the surrounding tag.""" + return self.render(self.template_name_table) + + def as_ul(self): + """Render as
  • elements excluding the surrounding
      tag.""" + return self.render(self.template_name_ul) + + def as_div(self): + """Render as
      elements.""" + return self.render(self.template_name_div) + + +class RenderableErrorMixin(RenderableMixin): + def as_json(self, escape_html=False): + return json.dumps(self.get_json_data(escape_html)) + + def as_text(self): + return self.render(self.template_name_text) + + def as_ul(self): + return self.render(self.template_name_ul) + + +class ErrorDict(dict, RenderableErrorMixin): + """ + A collection of errors that knows how to display itself in various formats. + + The dictionary keys are the field names, and the values are the errors. + """ + + template_name = "django/forms/errors/dict/default.html" + template_name_text = "django/forms/errors/dict/text.txt" + template_name_ul = "django/forms/errors/dict/ul.html" + + def __init__(self, *args, renderer=None, **kwargs): + super().__init__(*args, **kwargs) + self.renderer = renderer or get_default_renderer() + + def as_data(self): + return {f: e.as_data() for f, e in self.items()} + + def get_json_data(self, escape_html=False): + return {f: e.get_json_data(escape_html) for f, e in self.items()} + + def get_context(self): + return { + "errors": self.items(), + "error_class": "errorlist", + } + + +class ErrorList(UserList, list, RenderableErrorMixin): + """ + A collection of errors that knows how to display itself in various formats. 
+ """ + + template_name = "django/forms/errors/list/default.html" + template_name_text = "django/forms/errors/list/text.txt" + template_name_ul = "django/forms/errors/list/ul.html" + + def __init__(self, initlist=None, error_class=None, renderer=None): + super().__init__(initlist) + + if error_class is None: + self.error_class = "errorlist" + else: + self.error_class = "errorlist {}".format(error_class) + self.renderer = renderer or get_default_renderer() + + def as_data(self): + return ValidationError(self.data).error_list + + def copy(self): + copy = super().copy() + copy.error_class = self.error_class + return copy + + def get_json_data(self, escape_html=False): + errors = [] + for error in self.as_data(): + message = next(iter(error)) + errors.append( + { + "message": escape(message) if escape_html else message, + "code": error.code or "", + } + ) + return errors + + def get_context(self): + return { + "errors": self, + "error_class": self.error_class, + } + + def __repr__(self): + return repr(list(self)) + + def __contains__(self, item): + return item in list(self) + + def __eq__(self, other): + return list(self) == other + + def __getitem__(self, i): + error = self.data[i] + if isinstance(error, ValidationError): + return next(iter(error)) + return error + + def __reduce_ex__(self, *args, **kwargs): + # The `list` reduce function returns an iterator as the fourth element + # that is normally used for repopulating. Since we only inherit from + # `list` for `isinstance` backward compatibility (Refs #17413) we + # nullify this iterator as it would otherwise result in duplicate + # entries. (Refs #23594) + info = super(UserList, self).__reduce_ex__(*args, **kwargs) + return info[:3] + (None, None) + + +# Utilities for time zone support in DateTimeField et al. + + +def from_current_timezone(value): + """ + When time zone support is enabled, convert naive datetimes + entered in the current time zone to aware datetimes. 
+ """ + if settings.USE_TZ and value is not None and timezone.is_naive(value): + current_timezone = timezone.get_current_timezone() + try: + if timezone._datetime_ambiguous_or_imaginary(value, current_timezone): + raise ValueError("Ambiguous or non-existent time.") + return timezone.make_aware(value, current_timezone) + except Exception as exc: + raise ValidationError( + _( + "%(datetime)s couldn’t be interpreted " + "in time zone %(current_timezone)s; it " + "may be ambiguous or it may not exist." + ), + code="ambiguous_timezone", + params={"datetime": value, "current_timezone": current_timezone}, + ) from exc + return value + + +def to_current_timezone(value): + """ + When time zone support is enabled, convert aware datetimes + to naive datetimes in the current time zone for display. + """ + if settings.USE_TZ and value is not None and timezone.is_aware(value): + return timezone.make_naive(value) + return value diff --git a/testbed/django__django/django/forms/widgets.py b/testbed/django__django/django/forms/widgets.py new file mode 100644 index 0000000000000000000000000000000000000000..ab7c0f755f0598d2f6fa557edd84e9b31d250c27 --- /dev/null +++ b/testbed/django__django/django/forms/widgets.py @@ -0,0 +1,1200 @@ +""" +HTML Widget classes +""" + +import copy +import datetime +import warnings +from collections import defaultdict +from graphlib import CycleError, TopologicalSorter +from itertools import chain + +from django.forms.utils import to_current_timezone +from django.templatetags.static import static +from django.utils import formats +from django.utils.dates import MONTHS +from django.utils.formats import get_format +from django.utils.html import format_html, html_safe +from django.utils.regex_helper import _lazy_re_compile +from django.utils.safestring import mark_safe +from django.utils.translation import gettext_lazy as _ + +from .renderers import get_default_renderer + +__all__ = ( + "Media", + "MediaDefiningClass", + "Widget", + "TextInput", + 
"NumberInput", + "EmailInput", + "URLInput", + "PasswordInput", + "HiddenInput", + "MultipleHiddenInput", + "FileInput", + "ClearableFileInput", + "Textarea", + "DateInput", + "DateTimeInput", + "TimeInput", + "CheckboxInput", + "Select", + "NullBooleanSelect", + "SelectMultiple", + "RadioSelect", + "CheckboxSelectMultiple", + "MultiWidget", + "SplitDateTimeWidget", + "SplitHiddenDateTimeWidget", + "SelectDateWidget", +) + +MEDIA_TYPES = ("css", "js") + + +class MediaOrderConflictWarning(RuntimeWarning): + pass + + +@html_safe +class Media: + def __init__(self, media=None, css=None, js=None): + if media is not None: + css = getattr(media, "css", {}) + js = getattr(media, "js", []) + else: + if css is None: + css = {} + if js is None: + js = [] + self._css_lists = [css] + self._js_lists = [js] + + def __repr__(self): + return "Media(css=%r, js=%r)" % (self._css, self._js) + + def __str__(self): + return self.render() + + @property + def _css(self): + css = defaultdict(list) + for css_list in self._css_lists: + for medium, sublist in css_list.items(): + css[medium].append(sublist) + return {medium: self.merge(*lists) for medium, lists in css.items()} + + @property + def _js(self): + return self.merge(*self._js_lists) + + def render(self): + return mark_safe( + "\n".join( + chain.from_iterable( + getattr(self, "render_" + name)() for name in MEDIA_TYPES + ) + ) + ) + + def render_js(self): + return [ + path.__html__() + if hasattr(path, "__html__") + else format_html('', self.absolute_path(path)) + for path in self._js + ] + + def render_css(self): + # To keep rendering order consistent, we can't just iterate over items(). + # We need to sort the keys, and iterate over the sorted list. 
+ media = sorted(self._css) + return chain.from_iterable( + [ + path.__html__() + if hasattr(path, "__html__") + else format_html( + '', + self.absolute_path(path), + medium, + ) + for path in self._css[medium] + ] + for medium in media + ) + + def absolute_path(self, path): + """ + Given a relative or absolute path to a static asset, return an absolute + path. An absolute path will be returned unchanged while a relative path + will be passed to django.templatetags.static.static(). + """ + if path.startswith(("http://", "https://", "/")): + return path + return static(path) + + def __getitem__(self, name): + """Return a Media object that only contains media of the given type.""" + if name in MEDIA_TYPES: + return Media(**{str(name): getattr(self, "_" + name)}) + raise KeyError('Unknown media type "%s"' % name) + + @staticmethod + def merge(*lists): + """ + Merge lists while trying to keep the relative order of the elements. + Warn if the lists have the same elements in a different relative order. + + For static assets it can be important to have them included in the DOM + in a certain order. In JavaScript you may not be able to reference a + global or in CSS you might want to override a style. + """ + ts = TopologicalSorter() + for head, *tail in filter(None, lists): + ts.add(head) # Ensure that the first items are included. + for item in tail: + if head != item: # Avoid circular dependency to self. 
+ ts.add(item, head) + head = item + try: + return list(ts.static_order()) + except CycleError: + warnings.warn( + "Detected duplicate Media files in an opposite order: {}".format( + ", ".join(repr(list_) for list_ in lists) + ), + MediaOrderConflictWarning, + ) + return list(dict.fromkeys(chain.from_iterable(filter(None, lists)))) + + def __add__(self, other): + combined = Media() + combined._css_lists = self._css_lists[:] + combined._js_lists = self._js_lists[:] + for item in other._css_lists: + if item and item not in self._css_lists: + combined._css_lists.append(item) + for item in other._js_lists: + if item and item not in self._js_lists: + combined._js_lists.append(item) + return combined + + +def media_property(cls): + def _media(self): + # Get the media property of the superclass, if it exists + sup_cls = super(cls, self) + try: + base = sup_cls.media + except AttributeError: + base = Media() + + # Get the media definition for this class + definition = getattr(cls, "Media", None) + if definition: + extend = getattr(definition, "extend", True) + if extend: + if extend is True: + m = base + else: + m = Media() + for medium in extend: + m += base[medium] + return m + Media(definition) + return Media(definition) + return base + + return property(_media) + + +class MediaDefiningClass(type): + """ + Metaclass for classes that can have media definitions. 
+ """ + + def __new__(mcs, name, bases, attrs): + new_class = super().__new__(mcs, name, bases, attrs) + + if "media" not in attrs: + new_class.media = media_property(new_class) + + return new_class + + +class Widget(metaclass=MediaDefiningClass): + needs_multipart_form = False # Determines does this widget need multipart form + is_localized = False + is_required = False + supports_microseconds = True + use_fieldset = False + + def __init__(self, attrs=None): + self.attrs = {} if attrs is None else attrs.copy() + + def __deepcopy__(self, memo): + obj = copy.copy(self) + obj.attrs = self.attrs.copy() + memo[id(self)] = obj + return obj + + @property + def is_hidden(self): + return self.input_type == "hidden" if hasattr(self, "input_type") else False + + def subwidgets(self, name, value, attrs=None): + context = self.get_context(name, value, attrs) + yield context["widget"] + + def format_value(self, value): + """ + Return a value as it should appear when rendered in a template. + """ + if value == "" or value is None: + return None + if self.is_localized: + return formats.localize_input(value) + return str(value) + + def get_context(self, name, value, attrs): + return { + "widget": { + "name": name, + "is_hidden": self.is_hidden, + "required": self.is_required, + "value": self.format_value(value), + "attrs": self.build_attrs(self.attrs, attrs), + "template_name": self.template_name, + }, + } + + def render(self, name, value, attrs=None, renderer=None): + """Render the widget as an HTML string.""" + context = self.get_context(name, value, attrs) + return self._render(self.template_name, context, renderer) + + def _render(self, template_name, context, renderer=None): + if renderer is None: + renderer = get_default_renderer() + return mark_safe(renderer.render(template_name, context)) + + def build_attrs(self, base_attrs, extra_attrs=None): + """Build an attribute dictionary.""" + return {**base_attrs, **(extra_attrs or {})} + + def value_from_datadict(self, data, 
files, name): + """ + Given a dictionary of data and this widget's name, return the value + of this widget or None if it's not provided. + """ + return data.get(name) + + def value_omitted_from_data(self, data, files, name): + return name not in data + + def id_for_label(self, id_): + """ + Return the HTML ID attribute of this Widget for use by a