Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/sites/locale/zh_Hant/LC_MESSAGES/django.po +39 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/apps.py +13 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/checks.py +14 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/finders.py +290 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/handlers.py +67 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/management/commands/collectstatic.py +355 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/management/commands/findstatic.py +43 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/management/commands/runserver.py +32 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/storage.py +497 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/templatetags/staticfiles.py +19 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/testing.py +13 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/urls.py +19 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/utils.py +59 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/views.py +39 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/syndication/__init__.py +1 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/syndication/apps.py +7 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/syndication/views.py +218 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/__init__.py +125 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/backends/base.py +276 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/backends/db.py +208 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/backends/dummy.py +43 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/backends/filebased.py +143 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/backends/locmem.py +131 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/backends/memcached.py +200 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/utils.py +14 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/__init__.py +23 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/caches.py +16 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/database.py +11 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/messages.py +75 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/model_checks.py +154 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/registry.py +98 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/security/base.py +210 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/security/csrf.py +40 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/security/sessions.py +97 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/templates.py +38 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/urls.py +110 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/exceptions.py +183 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/__init__.py +3 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/base.py +169 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/images.py +80 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/locks.py +113 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/move.py +87 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/storage.py +364 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/temp.py +74 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/uploadedfile.py +118 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/uploadhandler.py +208 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/utils.py +52 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/handlers/base.py +171 -0
- A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/handlers/exception.py +126 -0
.gitattributes
CHANGED
|
@@ -152,3 +152,4 @@ A-news-Agrregation-system-master/myvenv/share/python-wheels/pip-9.0.1-py2.py3-no
|
|
| 152 |
A-news-Agrregation-system-master/myvenv/share/python-wheels/pkg_resources-0.0.0-py2.py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
|
| 153 |
A-news-Agrregation-system-master/myvenv/share/python-wheels/setuptools-39.0.1-py2.py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
|
| 154 |
A-news-Agrregation-system-master/myvenv/share/python-wheels/urllib3-1.22-py2.py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 152 |
A-news-Agrregation-system-master/myvenv/share/python-wheels/pkg_resources-0.0.0-py2.py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
|
| 153 |
A-news-Agrregation-system-master/myvenv/share/python-wheels/setuptools-39.0.1-py2.py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
|
| 154 |
A-news-Agrregation-system-master/myvenv/share/python-wheels/urllib3-1.22-py2.py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
|
| 155 |
+
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/pip/_vendor/distlib/t64.exe filter=lfs diff=lfs merge=lfs -text
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/sites/locale/zh_Hant/LC_MESSAGES/django.po
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is distributed under the same license as the Django package.
|
| 2 |
+
#
|
| 3 |
+
# Translators:
|
| 4 |
+
# Chen Chun-Chia <ccc.larc@gmail.com>, 2015
|
| 5 |
+
# Jannis Leidel <jannis@leidel.info>, 2011
|
| 6 |
+
# mail6543210 <mail6543210@yahoo.com.tw>, 2013
|
| 7 |
+
# Tzu-ping Chung <uranusjr@gmail.com>, 2016
|
| 8 |
+
msgid ""
|
| 9 |
+
msgstr ""
|
| 10 |
+
"Project-Id-Version: django\n"
|
| 11 |
+
"Report-Msgid-Bugs-To: \n"
|
| 12 |
+
"POT-Creation-Date: 2015-01-17 11:07+0100\n"
|
| 13 |
+
"PO-Revision-Date: 2017-09-19 16:40+0000\n"
|
| 14 |
+
"Last-Translator: Tzu-ping Chung <uranusjr@gmail.com>\n"
|
| 15 |
+
"Language-Team: Chinese (Taiwan) (http://www.transifex.com/django/django/"
|
| 16 |
+
"language/zh_TW/)\n"
|
| 17 |
+
"MIME-Version: 1.0\n"
|
| 18 |
+
"Content-Type: text/plain; charset=UTF-8\n"
|
| 19 |
+
"Content-Transfer-Encoding: 8bit\n"
|
| 20 |
+
"Language: zh_TW\n"
|
| 21 |
+
"Plural-Forms: nplurals=1; plural=0;\n"
|
| 22 |
+
|
| 23 |
+
msgid "Sites"
|
| 24 |
+
msgstr "網站"
|
| 25 |
+
|
| 26 |
+
msgid "The domain name cannot contain any spaces or tabs."
|
| 27 |
+
msgstr "網域名稱不能包含空格或定位字元。"
|
| 28 |
+
|
| 29 |
+
msgid "domain name"
|
| 30 |
+
msgstr "網域名稱"
|
| 31 |
+
|
| 32 |
+
msgid "display name"
|
| 33 |
+
msgstr "顯示名稱"
|
| 34 |
+
|
| 35 |
+
msgid "site"
|
| 36 |
+
msgstr "網站"
|
| 37 |
+
|
| 38 |
+
msgid "sites"
|
| 39 |
+
msgstr "網站"
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/apps.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.apps import AppConfig
|
| 2 |
+
from django.contrib.staticfiles.checks import check_finders
|
| 3 |
+
from django.core import checks
|
| 4 |
+
from django.utils.translation import gettext_lazy as _
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class StaticFilesConfig(AppConfig):
|
| 8 |
+
name = 'django.contrib.staticfiles'
|
| 9 |
+
verbose_name = _("Static Files")
|
| 10 |
+
ignore_patterns = ['CVS', '.*', '*~']
|
| 11 |
+
|
| 12 |
+
def ready(self):
|
| 13 |
+
checks.register(check_finders, 'staticfiles')
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/checks.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.contrib.staticfiles.finders import get_finders
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def check_finders(app_configs=None, **kwargs):
|
| 5 |
+
"""Check all registered staticfiles finders."""
|
| 6 |
+
errors = []
|
| 7 |
+
for finder in get_finders():
|
| 8 |
+
try:
|
| 9 |
+
finder_errors = finder.check()
|
| 10 |
+
except NotImplementedError:
|
| 11 |
+
pass
|
| 12 |
+
else:
|
| 13 |
+
errors.extend(finder_errors)
|
| 14 |
+
return errors
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/finders.py
ADDED
|
@@ -0,0 +1,290 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import os
|
| 3 |
+
from collections import OrderedDict
|
| 4 |
+
|
| 5 |
+
from django.apps import apps
|
| 6 |
+
from django.conf import settings
|
| 7 |
+
from django.contrib.staticfiles import utils
|
| 8 |
+
from django.core.checks import Error
|
| 9 |
+
from django.core.exceptions import ImproperlyConfigured
|
| 10 |
+
from django.core.files.storage import (
|
| 11 |
+
FileSystemStorage, Storage, default_storage,
|
| 12 |
+
)
|
| 13 |
+
from django.utils._os import safe_join
|
| 14 |
+
from django.utils.functional import LazyObject, empty
|
| 15 |
+
from django.utils.module_loading import import_string
|
| 16 |
+
|
| 17 |
+
# To keep track on which directories the finder has searched the static files.
|
| 18 |
+
searched_locations = []
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class BaseFinder:
|
| 22 |
+
"""
|
| 23 |
+
A base file finder to be used for custom staticfiles finder classes.
|
| 24 |
+
"""
|
| 25 |
+
def check(self, **kwargs):
|
| 26 |
+
raise NotImplementedError(
|
| 27 |
+
'subclasses may provide a check() method to verify the finder is '
|
| 28 |
+
'configured correctly.'
|
| 29 |
+
)
|
| 30 |
+
|
| 31 |
+
def find(self, path, all=False):
|
| 32 |
+
"""
|
| 33 |
+
Given a relative file path, find an absolute file path.
|
| 34 |
+
|
| 35 |
+
If the ``all`` parameter is False (default) return only the first found
|
| 36 |
+
file path; if True, return a list of all found files paths.
|
| 37 |
+
"""
|
| 38 |
+
raise NotImplementedError('subclasses of BaseFinder must provide a find() method')
|
| 39 |
+
|
| 40 |
+
def list(self, ignore_patterns):
|
| 41 |
+
"""
|
| 42 |
+
Given an optional list of paths to ignore, return a two item iterable
|
| 43 |
+
consisting of the relative path and storage instance.
|
| 44 |
+
"""
|
| 45 |
+
raise NotImplementedError('subclasses of BaseFinder must provide a list() method')
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class FileSystemFinder(BaseFinder):
|
| 49 |
+
"""
|
| 50 |
+
A static files finder that uses the ``STATICFILES_DIRS`` setting
|
| 51 |
+
to locate files.
|
| 52 |
+
"""
|
| 53 |
+
def __init__(self, app_names=None, *args, **kwargs):
|
| 54 |
+
# List of locations with static files
|
| 55 |
+
self.locations = []
|
| 56 |
+
# Maps dir paths to an appropriate storage instance
|
| 57 |
+
self.storages = OrderedDict()
|
| 58 |
+
for root in settings.STATICFILES_DIRS:
|
| 59 |
+
if isinstance(root, (list, tuple)):
|
| 60 |
+
prefix, root = root
|
| 61 |
+
else:
|
| 62 |
+
prefix = ''
|
| 63 |
+
if (prefix, root) not in self.locations:
|
| 64 |
+
self.locations.append((prefix, root))
|
| 65 |
+
for prefix, root in self.locations:
|
| 66 |
+
filesystem_storage = FileSystemStorage(location=root)
|
| 67 |
+
filesystem_storage.prefix = prefix
|
| 68 |
+
self.storages[root] = filesystem_storage
|
| 69 |
+
super().__init__(*args, **kwargs)
|
| 70 |
+
|
| 71 |
+
def check(self, **kwargs):
|
| 72 |
+
errors = []
|
| 73 |
+
if not isinstance(settings.STATICFILES_DIRS, (list, tuple)):
|
| 74 |
+
errors.append(Error(
|
| 75 |
+
'The STATICFILES_DIRS setting is not a tuple or list.',
|
| 76 |
+
hint='Perhaps you forgot a trailing comma?',
|
| 77 |
+
id='staticfiles.E001',
|
| 78 |
+
))
|
| 79 |
+
for root in settings.STATICFILES_DIRS:
|
| 80 |
+
if isinstance(root, (list, tuple)):
|
| 81 |
+
_, root = root
|
| 82 |
+
if settings.STATIC_ROOT and os.path.abspath(settings.STATIC_ROOT) == os.path.abspath(root):
|
| 83 |
+
errors.append(Error(
|
| 84 |
+
'The STATICFILES_DIRS setting should not contain the '
|
| 85 |
+
'STATIC_ROOT setting.',
|
| 86 |
+
id='staticfiles.E002',
|
| 87 |
+
))
|
| 88 |
+
return errors
|
| 89 |
+
|
| 90 |
+
def find(self, path, all=False):
|
| 91 |
+
"""
|
| 92 |
+
Look for files in the extra locations as defined in STATICFILES_DIRS.
|
| 93 |
+
"""
|
| 94 |
+
matches = []
|
| 95 |
+
for prefix, root in self.locations:
|
| 96 |
+
if root not in searched_locations:
|
| 97 |
+
searched_locations.append(root)
|
| 98 |
+
matched_path = self.find_location(root, path, prefix)
|
| 99 |
+
if matched_path:
|
| 100 |
+
if not all:
|
| 101 |
+
return matched_path
|
| 102 |
+
matches.append(matched_path)
|
| 103 |
+
return matches
|
| 104 |
+
|
| 105 |
+
def find_location(self, root, path, prefix=None):
|
| 106 |
+
"""
|
| 107 |
+
Find a requested static file in a location and return the found
|
| 108 |
+
absolute path (or ``None`` if no match).
|
| 109 |
+
"""
|
| 110 |
+
if prefix:
|
| 111 |
+
prefix = '%s%s' % (prefix, os.sep)
|
| 112 |
+
if not path.startswith(prefix):
|
| 113 |
+
return None
|
| 114 |
+
path = path[len(prefix):]
|
| 115 |
+
path = safe_join(root, path)
|
| 116 |
+
if os.path.exists(path):
|
| 117 |
+
return path
|
| 118 |
+
|
| 119 |
+
def list(self, ignore_patterns):
|
| 120 |
+
"""
|
| 121 |
+
List all files in all locations.
|
| 122 |
+
"""
|
| 123 |
+
for prefix, root in self.locations:
|
| 124 |
+
storage = self.storages[root]
|
| 125 |
+
for path in utils.get_files(storage, ignore_patterns):
|
| 126 |
+
yield path, storage
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
class AppDirectoriesFinder(BaseFinder):
|
| 130 |
+
"""
|
| 131 |
+
A static files finder that looks in the directory of each app as
|
| 132 |
+
specified in the source_dir attribute.
|
| 133 |
+
"""
|
| 134 |
+
storage_class = FileSystemStorage
|
| 135 |
+
source_dir = 'static'
|
| 136 |
+
|
| 137 |
+
def __init__(self, app_names=None, *args, **kwargs):
|
| 138 |
+
# The list of apps that are handled
|
| 139 |
+
self.apps = []
|
| 140 |
+
# Mapping of app names to storage instances
|
| 141 |
+
self.storages = OrderedDict()
|
| 142 |
+
app_configs = apps.get_app_configs()
|
| 143 |
+
if app_names:
|
| 144 |
+
app_names = set(app_names)
|
| 145 |
+
app_configs = [ac for ac in app_configs if ac.name in app_names]
|
| 146 |
+
for app_config in app_configs:
|
| 147 |
+
app_storage = self.storage_class(
|
| 148 |
+
os.path.join(app_config.path, self.source_dir))
|
| 149 |
+
if os.path.isdir(app_storage.location):
|
| 150 |
+
self.storages[app_config.name] = app_storage
|
| 151 |
+
if app_config.name not in self.apps:
|
| 152 |
+
self.apps.append(app_config.name)
|
| 153 |
+
super().__init__(*args, **kwargs)
|
| 154 |
+
|
| 155 |
+
def list(self, ignore_patterns):
|
| 156 |
+
"""
|
| 157 |
+
List all files in all app storages.
|
| 158 |
+
"""
|
| 159 |
+
for storage in self.storages.values():
|
| 160 |
+
if storage.exists(''): # check if storage location exists
|
| 161 |
+
for path in utils.get_files(storage, ignore_patterns):
|
| 162 |
+
yield path, storage
|
| 163 |
+
|
| 164 |
+
def find(self, path, all=False):
|
| 165 |
+
"""
|
| 166 |
+
Look for files in the app directories.
|
| 167 |
+
"""
|
| 168 |
+
matches = []
|
| 169 |
+
for app in self.apps:
|
| 170 |
+
app_location = self.storages[app].location
|
| 171 |
+
if app_location not in searched_locations:
|
| 172 |
+
searched_locations.append(app_location)
|
| 173 |
+
match = self.find_in_app(app, path)
|
| 174 |
+
if match:
|
| 175 |
+
if not all:
|
| 176 |
+
return match
|
| 177 |
+
matches.append(match)
|
| 178 |
+
return matches
|
| 179 |
+
|
| 180 |
+
def find_in_app(self, app, path):
|
| 181 |
+
"""
|
| 182 |
+
Find a requested static file in an app's static locations.
|
| 183 |
+
"""
|
| 184 |
+
storage = self.storages.get(app)
|
| 185 |
+
if storage:
|
| 186 |
+
# only try to find a file if the source dir actually exists
|
| 187 |
+
if storage.exists(path):
|
| 188 |
+
matched_path = storage.path(path)
|
| 189 |
+
if matched_path:
|
| 190 |
+
return matched_path
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
class BaseStorageFinder(BaseFinder):
|
| 194 |
+
"""
|
| 195 |
+
A base static files finder to be used to extended
|
| 196 |
+
with an own storage class.
|
| 197 |
+
"""
|
| 198 |
+
storage = None
|
| 199 |
+
|
| 200 |
+
def __init__(self, storage=None, *args, **kwargs):
|
| 201 |
+
if storage is not None:
|
| 202 |
+
self.storage = storage
|
| 203 |
+
if self.storage is None:
|
| 204 |
+
raise ImproperlyConfigured("The staticfiles storage finder %r "
|
| 205 |
+
"doesn't have a storage class "
|
| 206 |
+
"assigned." % self.__class__)
|
| 207 |
+
# Make sure we have a storage instance here.
|
| 208 |
+
if not isinstance(self.storage, (Storage, LazyObject)):
|
| 209 |
+
self.storage = self.storage()
|
| 210 |
+
super().__init__(*args, **kwargs)
|
| 211 |
+
|
| 212 |
+
def find(self, path, all=False):
|
| 213 |
+
"""
|
| 214 |
+
Look for files in the default file storage, if it's local.
|
| 215 |
+
"""
|
| 216 |
+
try:
|
| 217 |
+
self.storage.path('')
|
| 218 |
+
except NotImplementedError:
|
| 219 |
+
pass
|
| 220 |
+
else:
|
| 221 |
+
if self.storage.location not in searched_locations:
|
| 222 |
+
searched_locations.append(self.storage.location)
|
| 223 |
+
if self.storage.exists(path):
|
| 224 |
+
match = self.storage.path(path)
|
| 225 |
+
if all:
|
| 226 |
+
match = [match]
|
| 227 |
+
return match
|
| 228 |
+
return []
|
| 229 |
+
|
| 230 |
+
def list(self, ignore_patterns):
|
| 231 |
+
"""
|
| 232 |
+
List all files of the storage.
|
| 233 |
+
"""
|
| 234 |
+
for path in utils.get_files(self.storage, ignore_patterns):
|
| 235 |
+
yield path, self.storage
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
class DefaultStorageFinder(BaseStorageFinder):
|
| 239 |
+
"""
|
| 240 |
+
A static files finder that uses the default storage backend.
|
| 241 |
+
"""
|
| 242 |
+
storage = default_storage
|
| 243 |
+
|
| 244 |
+
def __init__(self, *args, **kwargs):
|
| 245 |
+
super().__init__(*args, **kwargs)
|
| 246 |
+
base_location = getattr(self.storage, 'base_location', empty)
|
| 247 |
+
if not base_location:
|
| 248 |
+
raise ImproperlyConfigured("The storage backend of the "
|
| 249 |
+
"staticfiles finder %r doesn't have "
|
| 250 |
+
"a valid location." % self.__class__)
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
def find(path, all=False):
|
| 254 |
+
"""
|
| 255 |
+
Find a static file with the given path using all enabled finders.
|
| 256 |
+
|
| 257 |
+
If ``all`` is ``False`` (default), return the first matching
|
| 258 |
+
absolute path (or ``None`` if no match). Otherwise return a list.
|
| 259 |
+
"""
|
| 260 |
+
searched_locations[:] = []
|
| 261 |
+
matches = []
|
| 262 |
+
for finder in get_finders():
|
| 263 |
+
result = finder.find(path, all=all)
|
| 264 |
+
if not all and result:
|
| 265 |
+
return result
|
| 266 |
+
if not isinstance(result, (list, tuple)):
|
| 267 |
+
result = [result]
|
| 268 |
+
matches.extend(result)
|
| 269 |
+
if matches:
|
| 270 |
+
return matches
|
| 271 |
+
# No match.
|
| 272 |
+
return [] if all else None
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
def get_finders():
|
| 276 |
+
for finder_path in settings.STATICFILES_FINDERS:
|
| 277 |
+
yield get_finder(finder_path)
|
| 278 |
+
|
| 279 |
+
|
| 280 |
+
@functools.lru_cache(maxsize=None)
|
| 281 |
+
def get_finder(import_path):
|
| 282 |
+
"""
|
| 283 |
+
Import the staticfiles finder class described by import_path, where
|
| 284 |
+
import_path is the full Python path to the class.
|
| 285 |
+
"""
|
| 286 |
+
Finder = import_string(import_path)
|
| 287 |
+
if not issubclass(Finder, BaseFinder):
|
| 288 |
+
raise ImproperlyConfigured('Finder "%s" is not a subclass of "%s"' %
|
| 289 |
+
(Finder, BaseFinder))
|
| 290 |
+
return Finder()
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/handlers.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from urllib.parse import urlparse
|
| 2 |
+
from urllib.request import url2pathname
|
| 3 |
+
|
| 4 |
+
from django.conf import settings
|
| 5 |
+
from django.contrib.staticfiles import utils
|
| 6 |
+
from django.contrib.staticfiles.views import serve
|
| 7 |
+
from django.core.handlers.wsgi import WSGIHandler, get_path_info
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class StaticFilesHandler(WSGIHandler):
|
| 11 |
+
"""
|
| 12 |
+
WSGI middleware that intercepts calls to the static files directory, as
|
| 13 |
+
defined by the STATIC_URL setting, and serves those files.
|
| 14 |
+
"""
|
| 15 |
+
# May be used to differentiate between handler types (e.g. in a
|
| 16 |
+
# request_finished signal)
|
| 17 |
+
handles_files = True
|
| 18 |
+
|
| 19 |
+
def __init__(self, application):
|
| 20 |
+
self.application = application
|
| 21 |
+
self.base_url = urlparse(self.get_base_url())
|
| 22 |
+
super().__init__()
|
| 23 |
+
|
| 24 |
+
def load_middleware(self):
|
| 25 |
+
# Middleware are already loaded for self.application; no need to reload
|
| 26 |
+
# them for self.
|
| 27 |
+
pass
|
| 28 |
+
|
| 29 |
+
def get_base_url(self):
|
| 30 |
+
utils.check_settings()
|
| 31 |
+
return settings.STATIC_URL
|
| 32 |
+
|
| 33 |
+
def _should_handle(self, path):
|
| 34 |
+
"""
|
| 35 |
+
Check if the path should be handled. Ignore the path if:
|
| 36 |
+
* the host is provided as part of the base_url
|
| 37 |
+
* the request's path isn't under the media path (or equal)
|
| 38 |
+
"""
|
| 39 |
+
return path.startswith(self.base_url[2]) and not self.base_url[1]
|
| 40 |
+
|
| 41 |
+
def file_path(self, url):
|
| 42 |
+
"""
|
| 43 |
+
Return the relative path to the media file on disk for the given URL.
|
| 44 |
+
"""
|
| 45 |
+
relative_url = url[len(self.base_url[2]):]
|
| 46 |
+
return url2pathname(relative_url)
|
| 47 |
+
|
| 48 |
+
def serve(self, request):
|
| 49 |
+
"""Serve the request path."""
|
| 50 |
+
return serve(request, self.file_path(request.path), insecure=True)
|
| 51 |
+
|
| 52 |
+
def get_response(self, request):
|
| 53 |
+
from django.http import Http404
|
| 54 |
+
|
| 55 |
+
if self._should_handle(request.path):
|
| 56 |
+
try:
|
| 57 |
+
return self.serve(request)
|
| 58 |
+
except Http404 as e:
|
| 59 |
+
if settings.DEBUG:
|
| 60 |
+
from django.views import debug
|
| 61 |
+
return debug.technical_404_response(request, e)
|
| 62 |
+
return super().get_response(request)
|
| 63 |
+
|
| 64 |
+
def __call__(self, environ, start_response):
|
| 65 |
+
if not self._should_handle(get_path_info(environ)):
|
| 66 |
+
return self.application(environ, start_response)
|
| 67 |
+
return super().__call__(environ, start_response)
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/management/commands/collectstatic.py
ADDED
|
@@ -0,0 +1,355 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from collections import OrderedDict
|
| 3 |
+
|
| 4 |
+
from django.apps import apps
|
| 5 |
+
from django.contrib.staticfiles.finders import get_finders
|
| 6 |
+
from django.contrib.staticfiles.storage import staticfiles_storage
|
| 7 |
+
from django.core.files.storage import FileSystemStorage
|
| 8 |
+
from django.core.management.base import BaseCommand, CommandError
|
| 9 |
+
from django.core.management.color import no_style
|
| 10 |
+
from django.utils.functional import cached_property
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class Command(BaseCommand):
|
| 14 |
+
"""
|
| 15 |
+
Copies or symlinks static files from different locations to the
|
| 16 |
+
settings.STATIC_ROOT.
|
| 17 |
+
"""
|
| 18 |
+
help = "Collect static files in a single location."
|
| 19 |
+
requires_system_checks = False
|
| 20 |
+
|
| 21 |
+
def __init__(self, *args, **kwargs):
|
| 22 |
+
super().__init__(*args, **kwargs)
|
| 23 |
+
self.copied_files = []
|
| 24 |
+
self.symlinked_files = []
|
| 25 |
+
self.unmodified_files = []
|
| 26 |
+
self.post_processed_files = []
|
| 27 |
+
self.storage = staticfiles_storage
|
| 28 |
+
self.style = no_style()
|
| 29 |
+
|
| 30 |
+
@cached_property
|
| 31 |
+
def local(self):
|
| 32 |
+
try:
|
| 33 |
+
self.storage.path('')
|
| 34 |
+
except NotImplementedError:
|
| 35 |
+
return False
|
| 36 |
+
return True
|
| 37 |
+
|
| 38 |
+
def add_arguments(self, parser):
|
| 39 |
+
parser.add_argument(
|
| 40 |
+
'--noinput', '--no-input', action='store_false', dest='interactive',
|
| 41 |
+
help="Do NOT prompt the user for input of any kind.",
|
| 42 |
+
)
|
| 43 |
+
parser.add_argument(
|
| 44 |
+
'--no-post-process', action='store_false', dest='post_process',
|
| 45 |
+
help="Do NOT post process collected files.",
|
| 46 |
+
)
|
| 47 |
+
parser.add_argument(
|
| 48 |
+
'-i', '--ignore', action='append', default=[],
|
| 49 |
+
dest='ignore_patterns', metavar='PATTERN',
|
| 50 |
+
help="Ignore files or directories matching this glob-style "
|
| 51 |
+
"pattern. Use multiple times to ignore more.",
|
| 52 |
+
)
|
| 53 |
+
parser.add_argument(
|
| 54 |
+
'-n', '--dry-run', action='store_true', dest='dry_run',
|
| 55 |
+
help="Do everything except modify the filesystem.",
|
| 56 |
+
)
|
| 57 |
+
parser.add_argument(
|
| 58 |
+
'-c', '--clear', action='store_true', dest='clear',
|
| 59 |
+
help="Clear the existing files using the storage "
|
| 60 |
+
"before trying to copy or link the original file.",
|
| 61 |
+
)
|
| 62 |
+
parser.add_argument(
|
| 63 |
+
'-l', '--link', action='store_true', dest='link',
|
| 64 |
+
help="Create a symbolic link to each file instead of copying.",
|
| 65 |
+
)
|
| 66 |
+
parser.add_argument(
|
| 67 |
+
'--no-default-ignore', action='store_false', dest='use_default_ignore_patterns',
|
| 68 |
+
help="Don't ignore the common private glob-style patterns (defaults to 'CVS', '.*' and '*~').",
|
| 69 |
+
)
|
| 70 |
+
|
| 71 |
+
def set_options(self, **options):
|
| 72 |
+
"""
|
| 73 |
+
Set instance variables based on an options dict
|
| 74 |
+
"""
|
| 75 |
+
self.interactive = options['interactive']
|
| 76 |
+
self.verbosity = options['verbosity']
|
| 77 |
+
self.symlink = options['link']
|
| 78 |
+
self.clear = options['clear']
|
| 79 |
+
self.dry_run = options['dry_run']
|
| 80 |
+
ignore_patterns = options['ignore_patterns']
|
| 81 |
+
if options['use_default_ignore_patterns']:
|
| 82 |
+
ignore_patterns += apps.get_app_config('staticfiles').ignore_patterns
|
| 83 |
+
self.ignore_patterns = list(set(ignore_patterns))
|
| 84 |
+
self.post_process = options['post_process']
|
| 85 |
+
|
| 86 |
+
def collect(self):
|
| 87 |
+
"""
|
| 88 |
+
Perform the bulk of the work of collectstatic.
|
| 89 |
+
|
| 90 |
+
Split off from handle() to facilitate testing.
|
| 91 |
+
"""
|
| 92 |
+
if self.symlink and not self.local:
|
| 93 |
+
raise CommandError("Can't symlink to a remote destination.")
|
| 94 |
+
|
| 95 |
+
if self.clear:
|
| 96 |
+
self.clear_dir('')
|
| 97 |
+
|
| 98 |
+
if self.symlink:
|
| 99 |
+
handler = self.link_file
|
| 100 |
+
else:
|
| 101 |
+
handler = self.copy_file
|
| 102 |
+
|
| 103 |
+
found_files = OrderedDict()
|
| 104 |
+
for finder in get_finders():
|
| 105 |
+
for path, storage in finder.list(self.ignore_patterns):
|
| 106 |
+
# Prefix the relative path if the source storage contains it
|
| 107 |
+
if getattr(storage, 'prefix', None):
|
| 108 |
+
prefixed_path = os.path.join(storage.prefix, path)
|
| 109 |
+
else:
|
| 110 |
+
prefixed_path = path
|
| 111 |
+
|
| 112 |
+
if prefixed_path not in found_files:
|
| 113 |
+
found_files[prefixed_path] = (storage, path)
|
| 114 |
+
handler(path, prefixed_path, storage)
|
| 115 |
+
else:
|
| 116 |
+
self.log(
|
| 117 |
+
"Found another file with the destination path '%s'. It "
|
| 118 |
+
"will be ignored since only the first encountered file "
|
| 119 |
+
"is collected. If this is not what you want, make sure "
|
| 120 |
+
"every static file has a unique path." % prefixed_path,
|
| 121 |
+
level=1,
|
| 122 |
+
)
|
| 123 |
+
|
| 124 |
+
# Here we check if the storage backend has a post_process
|
| 125 |
+
# method and pass it the list of modified files.
|
| 126 |
+
if self.post_process and hasattr(self.storage, 'post_process'):
|
| 127 |
+
processor = self.storage.post_process(found_files,
|
| 128 |
+
dry_run=self.dry_run)
|
| 129 |
+
for original_path, processed_path, processed in processor:
|
| 130 |
+
if isinstance(processed, Exception):
|
| 131 |
+
self.stderr.write("Post-processing '%s' failed!" % original_path)
|
| 132 |
+
# Add a blank line before the traceback, otherwise it's
|
| 133 |
+
# too easy to miss the relevant part of the error message.
|
| 134 |
+
self.stderr.write("")
|
| 135 |
+
raise processed
|
| 136 |
+
if processed:
|
| 137 |
+
self.log("Post-processed '%s' as '%s'" %
|
| 138 |
+
(original_path, processed_path), level=1)
|
| 139 |
+
self.post_processed_files.append(original_path)
|
| 140 |
+
else:
|
| 141 |
+
self.log("Skipped post-processing '%s'" % original_path)
|
| 142 |
+
|
| 143 |
+
return {
|
| 144 |
+
'modified': self.copied_files + self.symlinked_files,
|
| 145 |
+
'unmodified': self.unmodified_files,
|
| 146 |
+
'post_processed': self.post_processed_files,
|
| 147 |
+
}
|
| 148 |
+
|
| 149 |
+
def handle(self, **options):
|
| 150 |
+
self.set_options(**options)
|
| 151 |
+
|
| 152 |
+
message = ['\n']
|
| 153 |
+
if self.dry_run:
|
| 154 |
+
message.append(
|
| 155 |
+
'You have activated the --dry-run option so no files will be modified.\n\n'
|
| 156 |
+
)
|
| 157 |
+
|
| 158 |
+
message.append(
|
| 159 |
+
'You have requested to collect static files at the destination\n'
|
| 160 |
+
'location as specified in your settings'
|
| 161 |
+
)
|
| 162 |
+
|
| 163 |
+
if self.is_local_storage() and self.storage.location:
|
| 164 |
+
destination_path = self.storage.location
|
| 165 |
+
message.append(':\n\n %s\n\n' % destination_path)
|
| 166 |
+
should_warn_user = (
|
| 167 |
+
self.storage.exists(destination_path) and
|
| 168 |
+
any(self.storage.listdir(destination_path))
|
| 169 |
+
)
|
| 170 |
+
else:
|
| 171 |
+
destination_path = None
|
| 172 |
+
message.append('.\n\n')
|
| 173 |
+
# Destination files existence not checked; play it safe and warn.
|
| 174 |
+
should_warn_user = True
|
| 175 |
+
|
| 176 |
+
if self.interactive and should_warn_user:
|
| 177 |
+
if self.clear:
|
| 178 |
+
message.append('This will DELETE ALL FILES in this location!\n')
|
| 179 |
+
else:
|
| 180 |
+
message.append('This will overwrite existing files!\n')
|
| 181 |
+
|
| 182 |
+
message.append(
|
| 183 |
+
'Are you sure you want to do this?\n\n'
|
| 184 |
+
"Type 'yes' to continue, or 'no' to cancel: "
|
| 185 |
+
)
|
| 186 |
+
if input(''.join(message)) != 'yes':
|
| 187 |
+
raise CommandError("Collecting static files cancelled.")
|
| 188 |
+
|
| 189 |
+
collected = self.collect()
|
| 190 |
+
modified_count = len(collected['modified'])
|
| 191 |
+
unmodified_count = len(collected['unmodified'])
|
| 192 |
+
post_processed_count = len(collected['post_processed'])
|
| 193 |
+
|
| 194 |
+
if self.verbosity >= 1:
|
| 195 |
+
template = ("\n%(modified_count)s %(identifier)s %(action)s"
|
| 196 |
+
"%(destination)s%(unmodified)s%(post_processed)s.\n")
|
| 197 |
+
summary = template % {
|
| 198 |
+
'modified_count': modified_count,
|
| 199 |
+
'identifier': 'static file' + ('' if modified_count == 1 else 's'),
|
| 200 |
+
'action': 'symlinked' if self.symlink else 'copied',
|
| 201 |
+
'destination': (" to '%s'" % destination_path if destination_path else ''),
|
| 202 |
+
'unmodified': (', %s unmodified' % unmodified_count if collected['unmodified'] else ''),
|
| 203 |
+
'post_processed': (collected['post_processed'] and
|
| 204 |
+
', %s post-processed'
|
| 205 |
+
% post_processed_count or ''),
|
| 206 |
+
}
|
| 207 |
+
return summary
|
| 208 |
+
|
| 209 |
+
def log(self, msg, level=2):
|
| 210 |
+
"""
|
| 211 |
+
Small log helper
|
| 212 |
+
"""
|
| 213 |
+
if self.verbosity >= level:
|
| 214 |
+
self.stdout.write(msg)
|
| 215 |
+
|
| 216 |
+
def is_local_storage(self):
|
| 217 |
+
return isinstance(self.storage, FileSystemStorage)
|
| 218 |
+
|
| 219 |
+
def clear_dir(self, path):
|
| 220 |
+
"""
|
| 221 |
+
Delete the given relative path using the destination storage backend.
|
| 222 |
+
"""
|
| 223 |
+
if not self.storage.exists(path):
|
| 224 |
+
return
|
| 225 |
+
|
| 226 |
+
dirs, files = self.storage.listdir(path)
|
| 227 |
+
for f in files:
|
| 228 |
+
fpath = os.path.join(path, f)
|
| 229 |
+
if self.dry_run:
|
| 230 |
+
self.log("Pretending to delete '%s'" % fpath, level=1)
|
| 231 |
+
else:
|
| 232 |
+
self.log("Deleting '%s'" % fpath, level=1)
|
| 233 |
+
try:
|
| 234 |
+
full_path = self.storage.path(fpath)
|
| 235 |
+
except NotImplementedError:
|
| 236 |
+
self.storage.delete(fpath)
|
| 237 |
+
else:
|
| 238 |
+
if not os.path.exists(full_path) and os.path.lexists(full_path):
|
| 239 |
+
# Delete broken symlinks
|
| 240 |
+
os.unlink(full_path)
|
| 241 |
+
else:
|
| 242 |
+
self.storage.delete(fpath)
|
| 243 |
+
for d in dirs:
|
| 244 |
+
self.clear_dir(os.path.join(path, d))
|
| 245 |
+
|
| 246 |
+
def delete_file(self, path, prefixed_path, source_storage):
|
| 247 |
+
"""
|
| 248 |
+
Check if the target file should be deleted if it already exists.
|
| 249 |
+
"""
|
| 250 |
+
if self.storage.exists(prefixed_path):
|
| 251 |
+
try:
|
| 252 |
+
# When was the target file modified last time?
|
| 253 |
+
target_last_modified = self.storage.get_modified_time(prefixed_path)
|
| 254 |
+
except (OSError, NotImplementedError, AttributeError):
|
| 255 |
+
# The storage doesn't support get_modified_time() or failed
|
| 256 |
+
pass
|
| 257 |
+
else:
|
| 258 |
+
try:
|
| 259 |
+
# When was the source file modified last time?
|
| 260 |
+
source_last_modified = source_storage.get_modified_time(path)
|
| 261 |
+
except (OSError, NotImplementedError, AttributeError):
|
| 262 |
+
pass
|
| 263 |
+
else:
|
| 264 |
+
# The full path of the target file
|
| 265 |
+
if self.local:
|
| 266 |
+
full_path = self.storage.path(prefixed_path)
|
| 267 |
+
# If it's --link mode and the path isn't a link (i.e.
|
| 268 |
+
# the previous collectstatic wasn't with --link) or if
|
| 269 |
+
# it's non-link mode and the path is a link (i.e. the
|
| 270 |
+
# previous collectstatic was with --link), the old
|
| 271 |
+
# links/files must be deleted so it's not safe to skip
|
| 272 |
+
# unmodified files.
|
| 273 |
+
can_skip_unmodified_files = not (self.symlink ^ os.path.islink(full_path))
|
| 274 |
+
else:
|
| 275 |
+
full_path = None
|
| 276 |
+
# In remote storages, skipping is only based on the
|
| 277 |
+
# modified times since symlinks aren't relevant.
|
| 278 |
+
can_skip_unmodified_files = True
|
| 279 |
+
# Avoid sub-second precision (see #14665, #19540)
|
| 280 |
+
file_is_unmodified = (
|
| 281 |
+
target_last_modified.replace(microsecond=0) >=
|
| 282 |
+
source_last_modified.replace(microsecond=0)
|
| 283 |
+
)
|
| 284 |
+
if file_is_unmodified and can_skip_unmodified_files:
|
| 285 |
+
if prefixed_path not in self.unmodified_files:
|
| 286 |
+
self.unmodified_files.append(prefixed_path)
|
| 287 |
+
self.log("Skipping '%s' (not modified)" % path)
|
| 288 |
+
return False
|
| 289 |
+
# Then delete the existing file if really needed
|
| 290 |
+
if self.dry_run:
|
| 291 |
+
self.log("Pretending to delete '%s'" % path)
|
| 292 |
+
else:
|
| 293 |
+
self.log("Deleting '%s'" % path)
|
| 294 |
+
self.storage.delete(prefixed_path)
|
| 295 |
+
return True
|
| 296 |
+
|
| 297 |
+
def link_file(self, path, prefixed_path, source_storage):
|
| 298 |
+
"""
|
| 299 |
+
Attempt to link ``path``
|
| 300 |
+
"""
|
| 301 |
+
# Skip this file if it was already copied earlier
|
| 302 |
+
if prefixed_path in self.symlinked_files:
|
| 303 |
+
return self.log("Skipping '%s' (already linked earlier)" % path)
|
| 304 |
+
# Delete the target file if needed or break
|
| 305 |
+
if not self.delete_file(path, prefixed_path, source_storage):
|
| 306 |
+
return
|
| 307 |
+
# The full path of the source file
|
| 308 |
+
source_path = source_storage.path(path)
|
| 309 |
+
# Finally link the file
|
| 310 |
+
if self.dry_run:
|
| 311 |
+
self.log("Pretending to link '%s'" % source_path, level=1)
|
| 312 |
+
else:
|
| 313 |
+
self.log("Linking '%s'" % source_path, level=1)
|
| 314 |
+
full_path = self.storage.path(prefixed_path)
|
| 315 |
+
try:
|
| 316 |
+
os.makedirs(os.path.dirname(full_path))
|
| 317 |
+
except OSError:
|
| 318 |
+
pass
|
| 319 |
+
try:
|
| 320 |
+
if os.path.lexists(full_path):
|
| 321 |
+
os.unlink(full_path)
|
| 322 |
+
os.symlink(source_path, full_path)
|
| 323 |
+
except AttributeError:
|
| 324 |
+
import platform
|
| 325 |
+
raise CommandError("Symlinking is not supported by Python %s." %
|
| 326 |
+
platform.python_version())
|
| 327 |
+
except NotImplementedError:
|
| 328 |
+
import platform
|
| 329 |
+
raise CommandError("Symlinking is not supported in this "
|
| 330 |
+
"platform (%s)." % platform.platform())
|
| 331 |
+
except OSError as e:
|
| 332 |
+
raise CommandError(e)
|
| 333 |
+
if prefixed_path not in self.symlinked_files:
|
| 334 |
+
self.symlinked_files.append(prefixed_path)
|
| 335 |
+
|
| 336 |
+
def copy_file(self, path, prefixed_path, source_storage):
|
| 337 |
+
"""
|
| 338 |
+
Attempt to copy ``path`` with storage
|
| 339 |
+
"""
|
| 340 |
+
# Skip this file if it was already copied earlier
|
| 341 |
+
if prefixed_path in self.copied_files:
|
| 342 |
+
return self.log("Skipping '%s' (already copied earlier)" % path)
|
| 343 |
+
# Delete the target file if needed or break
|
| 344 |
+
if not self.delete_file(path, prefixed_path, source_storage):
|
| 345 |
+
return
|
| 346 |
+
# The full path of the source file
|
| 347 |
+
source_path = source_storage.path(path)
|
| 348 |
+
# Finally start copying
|
| 349 |
+
if self.dry_run:
|
| 350 |
+
self.log("Pretending to copy '%s'" % source_path, level=1)
|
| 351 |
+
else:
|
| 352 |
+
self.log("Copying '%s'" % source_path, level=1)
|
| 353 |
+
with source_storage.open(path) as source_file:
|
| 354 |
+
self.storage.save(prefixed_path, source_file)
|
| 355 |
+
self.copied_files.append(prefixed_path)
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/management/commands/findstatic.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
|
| 3 |
+
from django.contrib.staticfiles import finders
|
| 4 |
+
from django.core.management.base import LabelCommand
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class Command(LabelCommand):
|
| 8 |
+
help = "Finds the absolute paths for the given static file(s)."
|
| 9 |
+
label = 'staticfile'
|
| 10 |
+
|
| 11 |
+
def add_arguments(self, parser):
|
| 12 |
+
super().add_arguments(parser)
|
| 13 |
+
parser.add_argument(
|
| 14 |
+
'--first', action='store_false', dest='all',
|
| 15 |
+
help="Only return the first match for each static file.",
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
def handle_label(self, path, **options):
|
| 19 |
+
verbosity = options['verbosity']
|
| 20 |
+
result = finders.find(path, all=options['all'])
|
| 21 |
+
if verbosity >= 2:
|
| 22 |
+
searched_locations = (
|
| 23 |
+
"\nLooking in the following locations:\n %s" %
|
| 24 |
+
"\n ".join(finders.searched_locations)
|
| 25 |
+
)
|
| 26 |
+
else:
|
| 27 |
+
searched_locations = ''
|
| 28 |
+
if result:
|
| 29 |
+
if not isinstance(result, (list, tuple)):
|
| 30 |
+
result = [result]
|
| 31 |
+
result = (os.path.realpath(path) for path in result)
|
| 32 |
+
if verbosity >= 1:
|
| 33 |
+
file_list = '\n '.join(result)
|
| 34 |
+
return ("Found '%s' here:\n %s%s" %
|
| 35 |
+
(path, file_list, searched_locations))
|
| 36 |
+
else:
|
| 37 |
+
return '\n'.join(result)
|
| 38 |
+
else:
|
| 39 |
+
message = ["No matching file found for '%s'." % path]
|
| 40 |
+
if verbosity >= 2:
|
| 41 |
+
message.append(searched_locations)
|
| 42 |
+
if verbosity >= 1:
|
| 43 |
+
self.stderr.write('\n'.join(message))
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/management/commands/runserver.py
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.conf import settings
|
| 2 |
+
from django.contrib.staticfiles.handlers import StaticFilesHandler
|
| 3 |
+
from django.core.management.commands.runserver import (
|
| 4 |
+
Command as RunserverCommand,
|
| 5 |
+
)
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class Command(RunserverCommand):
|
| 9 |
+
help = "Starts a lightweight Web server for development and also serves static files."
|
| 10 |
+
|
| 11 |
+
def add_arguments(self, parser):
|
| 12 |
+
super().add_arguments(parser)
|
| 13 |
+
parser.add_argument(
|
| 14 |
+
'--nostatic', action="store_false", dest='use_static_handler',
|
| 15 |
+
help='Tells Django to NOT automatically serve static files at STATIC_URL.',
|
| 16 |
+
)
|
| 17 |
+
parser.add_argument(
|
| 18 |
+
'--insecure', action="store_true", dest='insecure_serving',
|
| 19 |
+
help='Allows serving static files even if DEBUG is False.',
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
def get_handler(self, *args, **options):
|
| 23 |
+
"""
|
| 24 |
+
Return the static files serving handler wrapping the default handler,
|
| 25 |
+
if static files should be served. Otherwise return the default handler.
|
| 26 |
+
"""
|
| 27 |
+
handler = super().get_handler(*args, **options)
|
| 28 |
+
use_static_handler = options['use_static_handler']
|
| 29 |
+
insecure_serving = options['insecure_serving']
|
| 30 |
+
if use_static_handler and (settings.DEBUG or insecure_serving):
|
| 31 |
+
return StaticFilesHandler(handler)
|
| 32 |
+
return handler
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/storage.py
ADDED
|
@@ -0,0 +1,497 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import hashlib
|
| 2 |
+
import json
|
| 3 |
+
import os
|
| 4 |
+
import posixpath
|
| 5 |
+
import re
|
| 6 |
+
from collections import OrderedDict
|
| 7 |
+
from urllib.parse import unquote, urldefrag, urlsplit, urlunsplit
|
| 8 |
+
|
| 9 |
+
from django.conf import settings
|
| 10 |
+
from django.contrib.staticfiles.utils import check_settings, matches_patterns
|
| 11 |
+
from django.core.cache import (
|
| 12 |
+
InvalidCacheBackendError, cache as default_cache, caches,
|
| 13 |
+
)
|
| 14 |
+
from django.core.exceptions import ImproperlyConfigured
|
| 15 |
+
from django.core.files.base import ContentFile
|
| 16 |
+
from django.core.files.storage import FileSystemStorage, get_storage_class
|
| 17 |
+
from django.utils.encoding import force_bytes
|
| 18 |
+
from django.utils.functional import LazyObject
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class StaticFilesStorage(FileSystemStorage):
|
| 22 |
+
"""
|
| 23 |
+
Standard file system storage for static files.
|
| 24 |
+
|
| 25 |
+
The defaults for ``location`` and ``base_url`` are
|
| 26 |
+
``STATIC_ROOT`` and ``STATIC_URL``.
|
| 27 |
+
"""
|
| 28 |
+
def __init__(self, location=None, base_url=None, *args, **kwargs):
|
| 29 |
+
if location is None:
|
| 30 |
+
location = settings.STATIC_ROOT
|
| 31 |
+
if base_url is None:
|
| 32 |
+
base_url = settings.STATIC_URL
|
| 33 |
+
check_settings(base_url)
|
| 34 |
+
super().__init__(location, base_url, *args, **kwargs)
|
| 35 |
+
# FileSystemStorage fallbacks to MEDIA_ROOT when location
|
| 36 |
+
# is empty, so we restore the empty value.
|
| 37 |
+
if not location:
|
| 38 |
+
self.base_location = None
|
| 39 |
+
self.location = None
|
| 40 |
+
|
| 41 |
+
def path(self, name):
|
| 42 |
+
if not self.location:
|
| 43 |
+
raise ImproperlyConfigured("You're using the staticfiles app "
|
| 44 |
+
"without having set the STATIC_ROOT "
|
| 45 |
+
"setting to a filesystem path.")
|
| 46 |
+
return super().path(name)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class HashedFilesMixin:
|
| 50 |
+
default_template = """url("%s")"""
|
| 51 |
+
max_post_process_passes = 5
|
| 52 |
+
patterns = (
|
| 53 |
+
("*.css", (
|
| 54 |
+
r"""(url\(['"]{0,1}\s*(.*?)["']{0,1}\))""",
|
| 55 |
+
(r"""(@import\s*["']\s*(.*?)["'])""", """@import url("%s")"""),
|
| 56 |
+
)),
|
| 57 |
+
)
|
| 58 |
+
|
| 59 |
+
def __init__(self, *args, **kwargs):
|
| 60 |
+
super().__init__(*args, **kwargs)
|
| 61 |
+
self._patterns = OrderedDict()
|
| 62 |
+
self.hashed_files = {}
|
| 63 |
+
for extension, patterns in self.patterns:
|
| 64 |
+
for pattern in patterns:
|
| 65 |
+
if isinstance(pattern, (tuple, list)):
|
| 66 |
+
pattern, template = pattern
|
| 67 |
+
else:
|
| 68 |
+
template = self.default_template
|
| 69 |
+
compiled = re.compile(pattern, re.IGNORECASE)
|
| 70 |
+
self._patterns.setdefault(extension, []).append((compiled, template))
|
| 71 |
+
|
| 72 |
+
def file_hash(self, name, content=None):
|
| 73 |
+
"""
|
| 74 |
+
Return a hash of the file with the given name and optional content.
|
| 75 |
+
"""
|
| 76 |
+
if content is None:
|
| 77 |
+
return None
|
| 78 |
+
md5 = hashlib.md5()
|
| 79 |
+
for chunk in content.chunks():
|
| 80 |
+
md5.update(chunk)
|
| 81 |
+
return md5.hexdigest()[:12]
|
| 82 |
+
|
| 83 |
+
def hashed_name(self, name, content=None, filename=None):
|
| 84 |
+
# `filename` is the name of file to hash if `content` isn't given.
|
| 85 |
+
# `name` is the base name to construct the new hashed filename from.
|
| 86 |
+
parsed_name = urlsplit(unquote(name))
|
| 87 |
+
clean_name = parsed_name.path.strip()
|
| 88 |
+
if filename:
|
| 89 |
+
filename = urlsplit(unquote(filename)).path.strip()
|
| 90 |
+
filename = filename or clean_name
|
| 91 |
+
opened = False
|
| 92 |
+
if content is None:
|
| 93 |
+
if not self.exists(filename):
|
| 94 |
+
raise ValueError("The file '%s' could not be found with %r." % (filename, self))
|
| 95 |
+
try:
|
| 96 |
+
content = self.open(filename)
|
| 97 |
+
except IOError:
|
| 98 |
+
# Handle directory paths and fragments
|
| 99 |
+
return name
|
| 100 |
+
opened = True
|
| 101 |
+
try:
|
| 102 |
+
file_hash = self.file_hash(clean_name, content)
|
| 103 |
+
finally:
|
| 104 |
+
if opened:
|
| 105 |
+
content.close()
|
| 106 |
+
path, filename = os.path.split(clean_name)
|
| 107 |
+
root, ext = os.path.splitext(filename)
|
| 108 |
+
if file_hash is not None:
|
| 109 |
+
file_hash = ".%s" % file_hash
|
| 110 |
+
hashed_name = os.path.join(path, "%s%s%s" %
|
| 111 |
+
(root, file_hash, ext))
|
| 112 |
+
unparsed_name = list(parsed_name)
|
| 113 |
+
unparsed_name[2] = hashed_name
|
| 114 |
+
# Special casing for a @font-face hack, like url(myfont.eot?#iefix")
|
| 115 |
+
# http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
|
| 116 |
+
if '?#' in name and not unparsed_name[3]:
|
| 117 |
+
unparsed_name[2] += '?'
|
| 118 |
+
return urlunsplit(unparsed_name)
|
| 119 |
+
|
| 120 |
+
def _url(self, hashed_name_func, name, force=False, hashed_files=None):
|
| 121 |
+
"""
|
| 122 |
+
Return the non-hashed URL in DEBUG mode.
|
| 123 |
+
"""
|
| 124 |
+
if settings.DEBUG and not force:
|
| 125 |
+
hashed_name, fragment = name, ''
|
| 126 |
+
else:
|
| 127 |
+
clean_name, fragment = urldefrag(name)
|
| 128 |
+
if urlsplit(clean_name).path.endswith('/'): # don't hash paths
|
| 129 |
+
hashed_name = name
|
| 130 |
+
else:
|
| 131 |
+
args = (clean_name,)
|
| 132 |
+
if hashed_files is not None:
|
| 133 |
+
args += (hashed_files,)
|
| 134 |
+
hashed_name = hashed_name_func(*args)
|
| 135 |
+
|
| 136 |
+
final_url = super().url(hashed_name)
|
| 137 |
+
|
| 138 |
+
# Special casing for a @font-face hack, like url(myfont.eot?#iefix")
|
| 139 |
+
# http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
|
| 140 |
+
query_fragment = '?#' in name # [sic!]
|
| 141 |
+
if fragment or query_fragment:
|
| 142 |
+
urlparts = list(urlsplit(final_url))
|
| 143 |
+
if fragment and not urlparts[4]:
|
| 144 |
+
urlparts[4] = fragment
|
| 145 |
+
if query_fragment and not urlparts[3]:
|
| 146 |
+
urlparts[2] += '?'
|
| 147 |
+
final_url = urlunsplit(urlparts)
|
| 148 |
+
|
| 149 |
+
return unquote(final_url)
|
| 150 |
+
|
| 151 |
+
def url(self, name, force=False):
|
| 152 |
+
"""
|
| 153 |
+
Return the non-hashed URL in DEBUG mode.
|
| 154 |
+
"""
|
| 155 |
+
return self._url(self.stored_name, name, force)
|
| 156 |
+
|
| 157 |
+
def url_converter(self, name, hashed_files, template=None):
|
| 158 |
+
"""
|
| 159 |
+
Return the custom URL converter for the given file name.
|
| 160 |
+
"""
|
| 161 |
+
if template is None:
|
| 162 |
+
template = self.default_template
|
| 163 |
+
|
| 164 |
+
def converter(matchobj):
|
| 165 |
+
"""
|
| 166 |
+
Convert the matched URL to a normalized and hashed URL.
|
| 167 |
+
|
| 168 |
+
This requires figuring out which files the matched URL resolves
|
| 169 |
+
to and calling the url() method of the storage.
|
| 170 |
+
"""
|
| 171 |
+
matched, url = matchobj.groups()
|
| 172 |
+
|
| 173 |
+
# Ignore absolute/protocol-relative and data-uri URLs.
|
| 174 |
+
if re.match(r'^[a-z]+:', url):
|
| 175 |
+
return matched
|
| 176 |
+
|
| 177 |
+
# Ignore absolute URLs that don't point to a static file (dynamic
|
| 178 |
+
# CSS / JS?). Note that STATIC_URL cannot be empty.
|
| 179 |
+
if url.startswith('/') and not url.startswith(settings.STATIC_URL):
|
| 180 |
+
return matched
|
| 181 |
+
|
| 182 |
+
# Strip off the fragment so a path-like fragment won't interfere.
|
| 183 |
+
url_path, fragment = urldefrag(url)
|
| 184 |
+
|
| 185 |
+
if url_path.startswith('/'):
|
| 186 |
+
# Otherwise the condition above would have returned prematurely.
|
| 187 |
+
assert url_path.startswith(settings.STATIC_URL)
|
| 188 |
+
target_name = url_path[len(settings.STATIC_URL):]
|
| 189 |
+
else:
|
| 190 |
+
# We're using the posixpath module to mix paths and URLs conveniently.
|
| 191 |
+
source_name = name if os.sep == '/' else name.replace(os.sep, '/')
|
| 192 |
+
target_name = posixpath.join(posixpath.dirname(source_name), url_path)
|
| 193 |
+
|
| 194 |
+
# Determine the hashed name of the target file with the storage backend.
|
| 195 |
+
hashed_url = self._url(
|
| 196 |
+
self._stored_name, unquote(target_name),
|
| 197 |
+
force=True, hashed_files=hashed_files,
|
| 198 |
+
)
|
| 199 |
+
|
| 200 |
+
transformed_url = '/'.join(url_path.split('/')[:-1] + hashed_url.split('/')[-1:])
|
| 201 |
+
|
| 202 |
+
# Restore the fragment that was stripped off earlier.
|
| 203 |
+
if fragment:
|
| 204 |
+
transformed_url += ('?#' if '?#' in url else '#') + fragment
|
| 205 |
+
|
| 206 |
+
# Return the hashed version to the file
|
| 207 |
+
return template % unquote(transformed_url)
|
| 208 |
+
|
| 209 |
+
return converter
|
| 210 |
+
|
| 211 |
+
def post_process(self, paths, dry_run=False, **options):
|
| 212 |
+
"""
|
| 213 |
+
Post process the given OrderedDict of files (called from collectstatic).
|
| 214 |
+
|
| 215 |
+
Processing is actually two separate operations:
|
| 216 |
+
|
| 217 |
+
1. renaming files to include a hash of their content for cache-busting,
|
| 218 |
+
and copying those files to the target storage.
|
| 219 |
+
2. adjusting files which contain references to other files so they
|
| 220 |
+
refer to the cache-busting filenames.
|
| 221 |
+
|
| 222 |
+
If either of these are performed on a file, then that file is considered
|
| 223 |
+
post-processed.
|
| 224 |
+
"""
|
| 225 |
+
# don't even dare to process the files if we're in dry run mode
|
| 226 |
+
if dry_run:
|
| 227 |
+
return
|
| 228 |
+
|
| 229 |
+
# where to store the new paths
|
| 230 |
+
hashed_files = OrderedDict()
|
| 231 |
+
|
| 232 |
+
# build a list of adjustable files
|
| 233 |
+
adjustable_paths = [
|
| 234 |
+
path for path in paths
|
| 235 |
+
if matches_patterns(path, self._patterns)
|
| 236 |
+
]
|
| 237 |
+
# Do a single pass first. Post-process all files once, then repeat for
|
| 238 |
+
# adjustable files.
|
| 239 |
+
for name, hashed_name, processed, _ in self._post_process(paths, adjustable_paths, hashed_files):
|
| 240 |
+
yield name, hashed_name, processed
|
| 241 |
+
|
| 242 |
+
paths = {path: paths[path] for path in adjustable_paths}
|
| 243 |
+
|
| 244 |
+
for i in range(self.max_post_process_passes):
|
| 245 |
+
substitutions = False
|
| 246 |
+
for name, hashed_name, processed, subst in self._post_process(paths, adjustable_paths, hashed_files):
|
| 247 |
+
yield name, hashed_name, processed
|
| 248 |
+
substitutions = substitutions or subst
|
| 249 |
+
|
| 250 |
+
if not substitutions:
|
| 251 |
+
break
|
| 252 |
+
|
| 253 |
+
if substitutions:
|
| 254 |
+
yield 'All', None, RuntimeError('Max post-process passes exceeded.')
|
| 255 |
+
|
| 256 |
+
# Store the processed paths
|
| 257 |
+
self.hashed_files.update(hashed_files)
|
| 258 |
+
|
| 259 |
+
def _post_process(self, paths, adjustable_paths, hashed_files):
|
| 260 |
+
# Sort the files by directory level
|
| 261 |
+
def path_level(name):
|
| 262 |
+
return len(name.split(os.sep))
|
| 263 |
+
|
| 264 |
+
for name in sorted(paths, key=path_level, reverse=True):
|
| 265 |
+
substitutions = True
|
| 266 |
+
# use the original, local file, not the copied-but-unprocessed
|
| 267 |
+
# file, which might be somewhere far away, like S3
|
| 268 |
+
storage, path = paths[name]
|
| 269 |
+
with storage.open(path) as original_file:
|
| 270 |
+
cleaned_name = self.clean_name(name)
|
| 271 |
+
hash_key = self.hash_key(cleaned_name)
|
| 272 |
+
|
| 273 |
+
# generate the hash with the original content, even for
|
| 274 |
+
# adjustable files.
|
| 275 |
+
if hash_key not in hashed_files:
|
| 276 |
+
hashed_name = self.hashed_name(name, original_file)
|
| 277 |
+
else:
|
| 278 |
+
hashed_name = hashed_files[hash_key]
|
| 279 |
+
|
| 280 |
+
# then get the original's file content..
|
| 281 |
+
if hasattr(original_file, 'seek'):
|
| 282 |
+
original_file.seek(0)
|
| 283 |
+
|
| 284 |
+
hashed_file_exists = self.exists(hashed_name)
|
| 285 |
+
processed = False
|
| 286 |
+
|
| 287 |
+
# ..to apply each replacement pattern to the content
|
| 288 |
+
if name in adjustable_paths:
|
| 289 |
+
old_hashed_name = hashed_name
|
| 290 |
+
content = original_file.read().decode(settings.FILE_CHARSET)
|
| 291 |
+
for extension, patterns in self._patterns.items():
|
| 292 |
+
if matches_patterns(path, (extension,)):
|
| 293 |
+
for pattern, template in patterns:
|
| 294 |
+
converter = self.url_converter(name, hashed_files, template)
|
| 295 |
+
try:
|
| 296 |
+
content = pattern.sub(converter, content)
|
| 297 |
+
except ValueError as exc:
|
| 298 |
+
yield name, None, exc, False
|
| 299 |
+
if hashed_file_exists:
|
| 300 |
+
self.delete(hashed_name)
|
| 301 |
+
# then save the processed result
|
| 302 |
+
content_file = ContentFile(force_bytes(content))
|
| 303 |
+
# Save intermediate file for reference
|
| 304 |
+
saved_name = self._save(hashed_name, content_file)
|
| 305 |
+
hashed_name = self.hashed_name(name, content_file)
|
| 306 |
+
|
| 307 |
+
if self.exists(hashed_name):
|
| 308 |
+
self.delete(hashed_name)
|
| 309 |
+
|
| 310 |
+
saved_name = self._save(hashed_name, content_file)
|
| 311 |
+
hashed_name = self.clean_name(saved_name)
|
| 312 |
+
# If the file hash stayed the same, this file didn't change
|
| 313 |
+
if old_hashed_name == hashed_name:
|
| 314 |
+
substitutions = False
|
| 315 |
+
processed = True
|
| 316 |
+
|
| 317 |
+
if not processed:
|
| 318 |
+
# or handle the case in which neither processing nor
|
| 319 |
+
# a change to the original file happened
|
| 320 |
+
if not hashed_file_exists:
|
| 321 |
+
processed = True
|
| 322 |
+
saved_name = self._save(hashed_name, original_file)
|
| 323 |
+
hashed_name = self.clean_name(saved_name)
|
| 324 |
+
|
| 325 |
+
# and then set the cache accordingly
|
| 326 |
+
hashed_files[hash_key] = hashed_name
|
| 327 |
+
|
| 328 |
+
yield name, hashed_name, processed, substitutions
|
| 329 |
+
|
| 330 |
+
def clean_name(self, name):
|
| 331 |
+
return name.replace('\\', '/')
|
| 332 |
+
|
| 333 |
+
def hash_key(self, name):
|
| 334 |
+
return name
|
| 335 |
+
|
| 336 |
+
def _stored_name(self, name, hashed_files):
|
| 337 |
+
# Normalize the path to avoid multiple names for the same file like
|
| 338 |
+
# ../foo/bar.css and ../foo/../foo/bar.css which normalize to the same
|
| 339 |
+
# path.
|
| 340 |
+
name = posixpath.normpath(name)
|
| 341 |
+
cleaned_name = self.clean_name(name)
|
| 342 |
+
hash_key = self.hash_key(cleaned_name)
|
| 343 |
+
cache_name = hashed_files.get(hash_key)
|
| 344 |
+
if cache_name is None:
|
| 345 |
+
cache_name = self.clean_name(self.hashed_name(name))
|
| 346 |
+
return cache_name
|
| 347 |
+
|
| 348 |
+
def stored_name(self, name):
|
| 349 |
+
cleaned_name = self.clean_name(name)
|
| 350 |
+
hash_key = self.hash_key(cleaned_name)
|
| 351 |
+
cache_name = self.hashed_files.get(hash_key)
|
| 352 |
+
if cache_name:
|
| 353 |
+
return cache_name
|
| 354 |
+
# No cached name found, recalculate it from the files.
|
| 355 |
+
intermediate_name = name
|
| 356 |
+
for i in range(self.max_post_process_passes + 1):
|
| 357 |
+
cache_name = self.clean_name(
|
| 358 |
+
self.hashed_name(name, content=None, filename=intermediate_name)
|
| 359 |
+
)
|
| 360 |
+
if intermediate_name == cache_name:
|
| 361 |
+
# Store the hashed name if there was a miss.
|
| 362 |
+
self.hashed_files[hash_key] = cache_name
|
| 363 |
+
return cache_name
|
| 364 |
+
else:
|
| 365 |
+
# Move on to the next intermediate file.
|
| 366 |
+
intermediate_name = cache_name
|
| 367 |
+
# If the cache name can't be determined after the max number of passes,
|
| 368 |
+
# the intermediate files on disk may be corrupt; avoid an infinite loop.
|
| 369 |
+
raise ValueError("The name '%s' could not be hashed with %r." % (name, self))
|
| 370 |
+
|
| 371 |
+
|
| 372 |
+
class ManifestFilesMixin(HashedFilesMixin):
|
| 373 |
+
manifest_version = '1.0' # the manifest format standard
|
| 374 |
+
manifest_name = 'staticfiles.json'
|
| 375 |
+
manifest_strict = True
|
| 376 |
+
|
| 377 |
+
def __init__(self, *args, **kwargs):
|
| 378 |
+
super().__init__(*args, **kwargs)
|
| 379 |
+
self.hashed_files = self.load_manifest()
|
| 380 |
+
|
| 381 |
+
def read_manifest(self):
|
| 382 |
+
try:
|
| 383 |
+
with self.open(self.manifest_name) as manifest:
|
| 384 |
+
return manifest.read().decode()
|
| 385 |
+
except IOError:
|
| 386 |
+
return None
|
| 387 |
+
|
| 388 |
+
def load_manifest(self):
|
| 389 |
+
content = self.read_manifest()
|
| 390 |
+
if content is None:
|
| 391 |
+
return OrderedDict()
|
| 392 |
+
try:
|
| 393 |
+
stored = json.loads(content, object_pairs_hook=OrderedDict)
|
| 394 |
+
except ValueError:
|
| 395 |
+
pass
|
| 396 |
+
else:
|
| 397 |
+
version = stored.get('version')
|
| 398 |
+
if version == '1.0':
|
| 399 |
+
return stored.get('paths', OrderedDict())
|
| 400 |
+
raise ValueError("Couldn't load manifest '%s' (version %s)" %
|
| 401 |
+
(self.manifest_name, self.manifest_version))
|
| 402 |
+
|
| 403 |
+
def post_process(self, *args, **kwargs):
|
| 404 |
+
self.hashed_files = OrderedDict()
|
| 405 |
+
yield from super().post_process(*args, **kwargs)
|
| 406 |
+
self.save_manifest()
|
| 407 |
+
|
| 408 |
+
def save_manifest(self):
|
| 409 |
+
payload = {'paths': self.hashed_files, 'version': self.manifest_version}
|
| 410 |
+
if self.exists(self.manifest_name):
|
| 411 |
+
self.delete(self.manifest_name)
|
| 412 |
+
contents = json.dumps(payload).encode()
|
| 413 |
+
self._save(self.manifest_name, ContentFile(contents))
|
| 414 |
+
|
| 415 |
+
def stored_name(self, name):
|
| 416 |
+
parsed_name = urlsplit(unquote(name))
|
| 417 |
+
clean_name = parsed_name.path.strip()
|
| 418 |
+
hash_key = self.hash_key(clean_name)
|
| 419 |
+
cache_name = self.hashed_files.get(hash_key)
|
| 420 |
+
if cache_name is None:
|
| 421 |
+
if self.manifest_strict:
|
| 422 |
+
raise ValueError("Missing staticfiles manifest entry for '%s'" % clean_name)
|
| 423 |
+
cache_name = self.clean_name(self.hashed_name(name))
|
| 424 |
+
unparsed_name = list(parsed_name)
|
| 425 |
+
unparsed_name[2] = cache_name
|
| 426 |
+
# Special casing for a @font-face hack, like url(myfont.eot?#iefix")
|
| 427 |
+
# http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
|
| 428 |
+
if '?#' in name and not unparsed_name[3]:
|
| 429 |
+
unparsed_name[2] += '?'
|
| 430 |
+
return urlunsplit(unparsed_name)
|
| 431 |
+
|
| 432 |
+
|
| 433 |
+
class _MappingCache:
|
| 434 |
+
"""
|
| 435 |
+
A small dict-like wrapper for a given cache backend instance.
|
| 436 |
+
"""
|
| 437 |
+
def __init__(self, cache):
|
| 438 |
+
self.cache = cache
|
| 439 |
+
|
| 440 |
+
def __setitem__(self, key, value):
|
| 441 |
+
self.cache.set(key, value)
|
| 442 |
+
|
| 443 |
+
def __getitem__(self, key):
|
| 444 |
+
value = self.cache.get(key)
|
| 445 |
+
if value is None:
|
| 446 |
+
raise KeyError("Couldn't find a file name '%s'" % key)
|
| 447 |
+
return value
|
| 448 |
+
|
| 449 |
+
def clear(self):
|
| 450 |
+
self.cache.clear()
|
| 451 |
+
|
| 452 |
+
def update(self, data):
|
| 453 |
+
self.cache.set_many(data)
|
| 454 |
+
|
| 455 |
+
def get(self, key, default=None):
|
| 456 |
+
try:
|
| 457 |
+
return self[key]
|
| 458 |
+
except KeyError:
|
| 459 |
+
return default
|
| 460 |
+
|
| 461 |
+
|
| 462 |
+
class CachedFilesMixin(HashedFilesMixin):
|
| 463 |
+
def __init__(self, *args, **kwargs):
|
| 464 |
+
super().__init__(*args, **kwargs)
|
| 465 |
+
try:
|
| 466 |
+
self.hashed_files = _MappingCache(caches['staticfiles'])
|
| 467 |
+
except InvalidCacheBackendError:
|
| 468 |
+
# Use the default backend
|
| 469 |
+
self.hashed_files = _MappingCache(default_cache)
|
| 470 |
+
|
| 471 |
+
def hash_key(self, name):
|
| 472 |
+
key = hashlib.md5(force_bytes(self.clean_name(name))).hexdigest()
|
| 473 |
+
return 'staticfiles:%s' % key
|
| 474 |
+
|
| 475 |
+
|
| 476 |
+
class CachedStaticFilesStorage(CachedFilesMixin, StaticFilesStorage):
|
| 477 |
+
"""
|
| 478 |
+
A static file system storage backend which also saves
|
| 479 |
+
hashed copies of the files it saves.
|
| 480 |
+
"""
|
| 481 |
+
pass
|
| 482 |
+
|
| 483 |
+
|
| 484 |
+
class ManifestStaticFilesStorage(ManifestFilesMixin, StaticFilesStorage):
|
| 485 |
+
"""
|
| 486 |
+
A static file system storage backend which also saves
|
| 487 |
+
hashed copies of the files it saves.
|
| 488 |
+
"""
|
| 489 |
+
pass
|
| 490 |
+
|
| 491 |
+
|
| 492 |
+
class ConfiguredStorage(LazyObject):
|
| 493 |
+
def _setup(self):
|
| 494 |
+
self._wrapped = get_storage_class(settings.STATICFILES_STORAGE)()
|
| 495 |
+
|
| 496 |
+
|
| 497 |
+
staticfiles_storage = ConfiguredStorage()
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/templatetags/staticfiles.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django import template
|
| 2 |
+
from django.templatetags.static import (
|
| 3 |
+
do_static as _do_static, static as _static,
|
| 4 |
+
)
|
| 5 |
+
|
| 6 |
+
register = template.Library()
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def static(path):
|
| 10 |
+
# Backwards compatibility alias for django.templatetags.static.static().
|
| 11 |
+
# Deprecation should start in Django 2.0.
|
| 12 |
+
return _static(path)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
@register.tag('static')
|
| 16 |
+
def do_static(parser, token):
|
| 17 |
+
# Backwards compatibility alias for django.templatetags.static.do_static().
|
| 18 |
+
# Deprecation should start in Django 2.0.
|
| 19 |
+
return _do_static(parser, token)
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/testing.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.contrib.staticfiles.handlers import StaticFilesHandler
|
| 2 |
+
from django.test import LiveServerTestCase
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
class StaticLiveServerTestCase(LiveServerTestCase):
|
| 6 |
+
"""
|
| 7 |
+
Extend django.test.LiveServerTestCase to transparently overlay at test
|
| 8 |
+
execution-time the assets provided by the staticfiles app finders. This
|
| 9 |
+
means you don't need to run collectstatic before or as a part of your tests
|
| 10 |
+
setup.
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
static_handler = StaticFilesHandler
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/urls.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.conf import settings
|
| 2 |
+
from django.conf.urls.static import static
|
| 3 |
+
from django.contrib.staticfiles.views import serve
|
| 4 |
+
|
| 5 |
+
urlpatterns = []
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def staticfiles_urlpatterns(prefix=None):
|
| 9 |
+
"""
|
| 10 |
+
Helper function to return a URL pattern for serving static files.
|
| 11 |
+
"""
|
| 12 |
+
if prefix is None:
|
| 13 |
+
prefix = settings.STATIC_URL
|
| 14 |
+
return static(prefix, view=serve)
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
# Only append if urlpatterns are empty
|
| 18 |
+
if settings.DEBUG and not urlpatterns:
|
| 19 |
+
urlpatterns += staticfiles_urlpatterns()
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/utils.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import fnmatch
|
| 2 |
+
import os
|
| 3 |
+
|
| 4 |
+
from django.conf import settings
|
| 5 |
+
from django.core.exceptions import ImproperlyConfigured
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def matches_patterns(path, patterns=None):
|
| 9 |
+
"""
|
| 10 |
+
Return True or False depending on whether the ``path`` should be
|
| 11 |
+
ignored (if it matches any pattern in ``ignore_patterns``).
|
| 12 |
+
"""
|
| 13 |
+
if patterns is None:
|
| 14 |
+
patterns = []
|
| 15 |
+
for pattern in patterns:
|
| 16 |
+
if fnmatch.fnmatchcase(path, pattern):
|
| 17 |
+
return True
|
| 18 |
+
return False
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def get_files(storage, ignore_patterns=None, location=''):
|
| 22 |
+
"""
|
| 23 |
+
Recursively walk the storage directories yielding the paths
|
| 24 |
+
of all files that should be copied.
|
| 25 |
+
"""
|
| 26 |
+
if ignore_patterns is None:
|
| 27 |
+
ignore_patterns = []
|
| 28 |
+
directories, files = storage.listdir(location)
|
| 29 |
+
for fn in files:
|
| 30 |
+
if matches_patterns(fn, ignore_patterns):
|
| 31 |
+
continue
|
| 32 |
+
if location:
|
| 33 |
+
fn = os.path.join(location, fn)
|
| 34 |
+
yield fn
|
| 35 |
+
for dir in directories:
|
| 36 |
+
if matches_patterns(dir, ignore_patterns):
|
| 37 |
+
continue
|
| 38 |
+
if location:
|
| 39 |
+
dir = os.path.join(location, dir)
|
| 40 |
+
yield from get_files(storage, ignore_patterns, dir)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def check_settings(base_url=None):
|
| 44 |
+
"""
|
| 45 |
+
Check if the staticfiles settings have sane values.
|
| 46 |
+
"""
|
| 47 |
+
if base_url is None:
|
| 48 |
+
base_url = settings.STATIC_URL
|
| 49 |
+
if not base_url:
|
| 50 |
+
raise ImproperlyConfigured(
|
| 51 |
+
"You're using the staticfiles app "
|
| 52 |
+
"without having set the required STATIC_URL setting.")
|
| 53 |
+
if settings.MEDIA_URL == base_url:
|
| 54 |
+
raise ImproperlyConfigured("The MEDIA_URL and STATIC_URL "
|
| 55 |
+
"settings must have different values")
|
| 56 |
+
if ((settings.MEDIA_ROOT and settings.STATIC_ROOT) and
|
| 57 |
+
(settings.MEDIA_ROOT == settings.STATIC_ROOT)):
|
| 58 |
+
raise ImproperlyConfigured("The MEDIA_ROOT and STATIC_ROOT "
|
| 59 |
+
"settings must have different values")
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/staticfiles/views.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Views and functions for serving static files. These are only to be used during
|
| 3 |
+
development, and SHOULD NOT be used in a production setting.
|
| 4 |
+
|
| 5 |
+
"""
|
| 6 |
+
import os
|
| 7 |
+
import posixpath
|
| 8 |
+
|
| 9 |
+
from django.conf import settings
|
| 10 |
+
from django.contrib.staticfiles import finders
|
| 11 |
+
from django.http import Http404
|
| 12 |
+
from django.views import static
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def serve(request, path, insecure=False, **kwargs):
|
| 16 |
+
"""
|
| 17 |
+
Serve static files below a given point in the directory structure or
|
| 18 |
+
from locations inferred from the staticfiles finders.
|
| 19 |
+
|
| 20 |
+
To use, put a URL pattern such as::
|
| 21 |
+
|
| 22 |
+
from django.contrib.staticfiles import views
|
| 23 |
+
|
| 24 |
+
url(r'^(?P<path>.*)$', views.serve)
|
| 25 |
+
|
| 26 |
+
in your URLconf.
|
| 27 |
+
|
| 28 |
+
It uses the django.views.static.serve() view to serve the found files.
|
| 29 |
+
"""
|
| 30 |
+
if not settings.DEBUG and not insecure:
|
| 31 |
+
raise Http404
|
| 32 |
+
normalized_path = posixpath.normpath(path).lstrip('/')
|
| 33 |
+
absolute_path = finders.find(normalized_path)
|
| 34 |
+
if not absolute_path:
|
| 35 |
+
if path.endswith('/') or path == '':
|
| 36 |
+
raise Http404("Directory indexes are not allowed here.")
|
| 37 |
+
raise Http404("'%s' could not be found" % path)
|
| 38 |
+
document_root, path = os.path.split(absolute_path)
|
| 39 |
+
return static.serve(request, path, document_root=document_root, **kwargs)
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/syndication/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
default_app_config = 'django.contrib.syndication.apps.SyndicationConfig'
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/syndication/apps.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.apps import AppConfig
|
| 2 |
+
from django.utils.translation import gettext_lazy as _
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
class SyndicationConfig(AppConfig):
|
| 6 |
+
name = 'django.contrib.syndication'
|
| 7 |
+
verbose_name = _("Syndication")
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/contrib/syndication/views.py
ADDED
|
@@ -0,0 +1,218 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from calendar import timegm
|
| 2 |
+
|
| 3 |
+
from django.conf import settings
|
| 4 |
+
from django.contrib.sites.shortcuts import get_current_site
|
| 5 |
+
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
|
| 6 |
+
from django.http import Http404, HttpResponse
|
| 7 |
+
from django.template import TemplateDoesNotExist, loader
|
| 8 |
+
from django.utils import feedgenerator
|
| 9 |
+
from django.utils.encoding import iri_to_uri
|
| 10 |
+
from django.utils.html import escape
|
| 11 |
+
from django.utils.http import http_date
|
| 12 |
+
from django.utils.timezone import get_default_timezone, is_naive, make_aware
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def add_domain(domain, url, secure=False):
|
| 16 |
+
protocol = 'https' if secure else 'http'
|
| 17 |
+
if url.startswith('//'):
|
| 18 |
+
# Support network-path reference (see #16753) - RSS requires a protocol
|
| 19 |
+
url = '%s:%s' % (protocol, url)
|
| 20 |
+
elif not url.startswith(('http://', 'https://', 'mailto:')):
|
| 21 |
+
url = iri_to_uri('%s://%s%s' % (protocol, domain, url))
|
| 22 |
+
return url
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class FeedDoesNotExist(ObjectDoesNotExist):
|
| 26 |
+
pass
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class Feed:
|
| 30 |
+
feed_type = feedgenerator.DefaultFeed
|
| 31 |
+
title_template = None
|
| 32 |
+
description_template = None
|
| 33 |
+
|
| 34 |
+
def __call__(self, request, *args, **kwargs):
|
| 35 |
+
try:
|
| 36 |
+
obj = self.get_object(request, *args, **kwargs)
|
| 37 |
+
except ObjectDoesNotExist:
|
| 38 |
+
raise Http404('Feed object does not exist.')
|
| 39 |
+
feedgen = self.get_feed(obj, request)
|
| 40 |
+
response = HttpResponse(content_type=feedgen.content_type)
|
| 41 |
+
if hasattr(self, 'item_pubdate') or hasattr(self, 'item_updateddate'):
|
| 42 |
+
# if item_pubdate or item_updateddate is defined for the feed, set
|
| 43 |
+
# header so as ConditionalGetMiddleware is able to send 304 NOT MODIFIED
|
| 44 |
+
response['Last-Modified'] = http_date(
|
| 45 |
+
timegm(feedgen.latest_post_date().utctimetuple()))
|
| 46 |
+
feedgen.write(response, 'utf-8')
|
| 47 |
+
return response
|
| 48 |
+
|
| 49 |
+
def item_title(self, item):
|
| 50 |
+
# Titles should be double escaped by default (see #6533)
|
| 51 |
+
return escape(str(item))
|
| 52 |
+
|
| 53 |
+
def item_description(self, item):
|
| 54 |
+
return str(item)
|
| 55 |
+
|
| 56 |
+
def item_link(self, item):
|
| 57 |
+
try:
|
| 58 |
+
return item.get_absolute_url()
|
| 59 |
+
except AttributeError:
|
| 60 |
+
raise ImproperlyConfigured(
|
| 61 |
+
'Give your %s class a get_absolute_url() method, or define an '
|
| 62 |
+
'item_link() method in your Feed class.' % item.__class__.__name__
|
| 63 |
+
)
|
| 64 |
+
|
| 65 |
+
def item_enclosures(self, item):
|
| 66 |
+
enc_url = self._get_dynamic_attr('item_enclosure_url', item)
|
| 67 |
+
if enc_url:
|
| 68 |
+
enc = feedgenerator.Enclosure(
|
| 69 |
+
url=str(enc_url),
|
| 70 |
+
length=str(self._get_dynamic_attr('item_enclosure_length', item)),
|
| 71 |
+
mime_type=str(self._get_dynamic_attr('item_enclosure_mime_type', item)),
|
| 72 |
+
)
|
| 73 |
+
return [enc]
|
| 74 |
+
return []
|
| 75 |
+
|
| 76 |
+
def _get_dynamic_attr(self, attname, obj, default=None):
|
| 77 |
+
try:
|
| 78 |
+
attr = getattr(self, attname)
|
| 79 |
+
except AttributeError:
|
| 80 |
+
return default
|
| 81 |
+
if callable(attr):
|
| 82 |
+
# Check co_argcount rather than try/excepting the function and
|
| 83 |
+
# catching the TypeError, because something inside the function
|
| 84 |
+
# may raise the TypeError. This technique is more accurate.
|
| 85 |
+
try:
|
| 86 |
+
code = attr.__code__
|
| 87 |
+
except AttributeError:
|
| 88 |
+
code = attr.__call__.__code__
|
| 89 |
+
if code.co_argcount == 2: # one argument is 'self'
|
| 90 |
+
return attr(obj)
|
| 91 |
+
else:
|
| 92 |
+
return attr()
|
| 93 |
+
return attr
|
| 94 |
+
|
| 95 |
+
def feed_extra_kwargs(self, obj):
|
| 96 |
+
"""
|
| 97 |
+
Return an extra keyword arguments dictionary that is used when
|
| 98 |
+
initializing the feed generator.
|
| 99 |
+
"""
|
| 100 |
+
return {}
|
| 101 |
+
|
| 102 |
+
def item_extra_kwargs(self, item):
|
| 103 |
+
"""
|
| 104 |
+
Return an extra keyword arguments dictionary that is used with
|
| 105 |
+
the `add_item` call of the feed generator.
|
| 106 |
+
"""
|
| 107 |
+
return {}
|
| 108 |
+
|
| 109 |
+
def get_object(self, request, *args, **kwargs):
|
| 110 |
+
return None
|
| 111 |
+
|
| 112 |
+
def get_context_data(self, **kwargs):
|
| 113 |
+
"""
|
| 114 |
+
Return a dictionary to use as extra context if either
|
| 115 |
+
``self.description_template`` or ``self.item_template`` are used.
|
| 116 |
+
|
| 117 |
+
Default implementation preserves the old behavior
|
| 118 |
+
of using {'obj': item, 'site': current_site} as the context.
|
| 119 |
+
"""
|
| 120 |
+
return {'obj': kwargs.get('item'), 'site': kwargs.get('site')}
|
| 121 |
+
|
| 122 |
+
def get_feed(self, obj, request):
|
| 123 |
+
"""
|
| 124 |
+
Return a feedgenerator.DefaultFeed object, fully populated, for
|
| 125 |
+
this feed. Raise FeedDoesNotExist for invalid parameters.
|
| 126 |
+
"""
|
| 127 |
+
current_site = get_current_site(request)
|
| 128 |
+
|
| 129 |
+
link = self._get_dynamic_attr('link', obj)
|
| 130 |
+
link = add_domain(current_site.domain, link, request.is_secure())
|
| 131 |
+
|
| 132 |
+
feed = self.feed_type(
|
| 133 |
+
title=self._get_dynamic_attr('title', obj),
|
| 134 |
+
subtitle=self._get_dynamic_attr('subtitle', obj),
|
| 135 |
+
link=link,
|
| 136 |
+
description=self._get_dynamic_attr('description', obj),
|
| 137 |
+
language=settings.LANGUAGE_CODE,
|
| 138 |
+
feed_url=add_domain(
|
| 139 |
+
current_site.domain,
|
| 140 |
+
self._get_dynamic_attr('feed_url', obj) or request.path,
|
| 141 |
+
request.is_secure(),
|
| 142 |
+
),
|
| 143 |
+
author_name=self._get_dynamic_attr('author_name', obj),
|
| 144 |
+
author_link=self._get_dynamic_attr('author_link', obj),
|
| 145 |
+
author_email=self._get_dynamic_attr('author_email', obj),
|
| 146 |
+
categories=self._get_dynamic_attr('categories', obj),
|
| 147 |
+
feed_copyright=self._get_dynamic_attr('feed_copyright', obj),
|
| 148 |
+
feed_guid=self._get_dynamic_attr('feed_guid', obj),
|
| 149 |
+
ttl=self._get_dynamic_attr('ttl', obj),
|
| 150 |
+
**self.feed_extra_kwargs(obj)
|
| 151 |
+
)
|
| 152 |
+
|
| 153 |
+
title_tmp = None
|
| 154 |
+
if self.title_template is not None:
|
| 155 |
+
try:
|
| 156 |
+
title_tmp = loader.get_template(self.title_template)
|
| 157 |
+
except TemplateDoesNotExist:
|
| 158 |
+
pass
|
| 159 |
+
|
| 160 |
+
description_tmp = None
|
| 161 |
+
if self.description_template is not None:
|
| 162 |
+
try:
|
| 163 |
+
description_tmp = loader.get_template(self.description_template)
|
| 164 |
+
except TemplateDoesNotExist:
|
| 165 |
+
pass
|
| 166 |
+
|
| 167 |
+
for item in self._get_dynamic_attr('items', obj):
|
| 168 |
+
context = self.get_context_data(item=item, site=current_site,
|
| 169 |
+
obj=obj, request=request)
|
| 170 |
+
if title_tmp is not None:
|
| 171 |
+
title = title_tmp.render(context, request)
|
| 172 |
+
else:
|
| 173 |
+
title = self._get_dynamic_attr('item_title', item)
|
| 174 |
+
if description_tmp is not None:
|
| 175 |
+
description = description_tmp.render(context, request)
|
| 176 |
+
else:
|
| 177 |
+
description = self._get_dynamic_attr('item_description', item)
|
| 178 |
+
link = add_domain(
|
| 179 |
+
current_site.domain,
|
| 180 |
+
self._get_dynamic_attr('item_link', item),
|
| 181 |
+
request.is_secure(),
|
| 182 |
+
)
|
| 183 |
+
enclosures = self._get_dynamic_attr('item_enclosures', item)
|
| 184 |
+
author_name = self._get_dynamic_attr('item_author_name', item)
|
| 185 |
+
if author_name is not None:
|
| 186 |
+
author_email = self._get_dynamic_attr('item_author_email', item)
|
| 187 |
+
author_link = self._get_dynamic_attr('item_author_link', item)
|
| 188 |
+
else:
|
| 189 |
+
author_email = author_link = None
|
| 190 |
+
|
| 191 |
+
tz = get_default_timezone()
|
| 192 |
+
|
| 193 |
+
pubdate = self._get_dynamic_attr('item_pubdate', item)
|
| 194 |
+
if pubdate and is_naive(pubdate):
|
| 195 |
+
pubdate = make_aware(pubdate, tz)
|
| 196 |
+
|
| 197 |
+
updateddate = self._get_dynamic_attr('item_updateddate', item)
|
| 198 |
+
if updateddate and is_naive(updateddate):
|
| 199 |
+
updateddate = make_aware(updateddate, tz)
|
| 200 |
+
|
| 201 |
+
feed.add_item(
|
| 202 |
+
title=title,
|
| 203 |
+
link=link,
|
| 204 |
+
description=description,
|
| 205 |
+
unique_id=self._get_dynamic_attr('item_guid', item, link),
|
| 206 |
+
unique_id_is_permalink=self._get_dynamic_attr(
|
| 207 |
+
'item_guid_is_permalink', item),
|
| 208 |
+
enclosures=enclosures,
|
| 209 |
+
pubdate=pubdate,
|
| 210 |
+
updateddate=updateddate,
|
| 211 |
+
author_name=author_name,
|
| 212 |
+
author_email=author_email,
|
| 213 |
+
author_link=author_link,
|
| 214 |
+
categories=self._get_dynamic_attr('item_categories', item),
|
| 215 |
+
item_copyright=self._get_dynamic_attr('item_copyright', item),
|
| 216 |
+
**self.item_extra_kwargs(item)
|
| 217 |
+
)
|
| 218 |
+
return feed
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/__init__.py
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Caching framework.
|
| 3 |
+
|
| 4 |
+
This package defines set of cache backends that all conform to a simple API.
|
| 5 |
+
In a nutshell, a cache is a set of values -- which can be any object that
|
| 6 |
+
may be pickled -- identified by string keys. For the complete API, see
|
| 7 |
+
the abstract BaseCache class in django.core.cache.backends.base.
|
| 8 |
+
|
| 9 |
+
Client code should use the `cache` variable defined here to access the default
|
| 10 |
+
cache backend and look up non-default cache backends in the `caches` dict-like
|
| 11 |
+
object.
|
| 12 |
+
|
| 13 |
+
See docs/topics/cache.txt for information on the public API.
|
| 14 |
+
"""
|
| 15 |
+
from threading import local
|
| 16 |
+
|
| 17 |
+
from django.conf import settings
|
| 18 |
+
from django.core import signals
|
| 19 |
+
from django.core.cache.backends.base import (
|
| 20 |
+
BaseCache, CacheKeyWarning, InvalidCacheBackendError,
|
| 21 |
+
)
|
| 22 |
+
from django.utils.module_loading import import_string
|
| 23 |
+
|
| 24 |
+
__all__ = [
|
| 25 |
+
'cache', 'DEFAULT_CACHE_ALIAS', 'InvalidCacheBackendError',
|
| 26 |
+
'CacheKeyWarning', 'BaseCache',
|
| 27 |
+
]
|
| 28 |
+
|
| 29 |
+
DEFAULT_CACHE_ALIAS = 'default'
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def _create_cache(backend, **kwargs):
|
| 33 |
+
try:
|
| 34 |
+
# Try to get the CACHES entry for the given backend name first
|
| 35 |
+
try:
|
| 36 |
+
conf = settings.CACHES[backend]
|
| 37 |
+
except KeyError:
|
| 38 |
+
try:
|
| 39 |
+
# Trying to import the given backend, in case it's a dotted path
|
| 40 |
+
import_string(backend)
|
| 41 |
+
except ImportError as e:
|
| 42 |
+
raise InvalidCacheBackendError("Could not find backend '%s': %s" % (
|
| 43 |
+
backend, e))
|
| 44 |
+
location = kwargs.pop('LOCATION', '')
|
| 45 |
+
params = kwargs
|
| 46 |
+
else:
|
| 47 |
+
params = conf.copy()
|
| 48 |
+
params.update(kwargs)
|
| 49 |
+
backend = params.pop('BACKEND')
|
| 50 |
+
location = params.pop('LOCATION', '')
|
| 51 |
+
backend_cls = import_string(backend)
|
| 52 |
+
except ImportError as e:
|
| 53 |
+
raise InvalidCacheBackendError(
|
| 54 |
+
"Could not find backend '%s': %s" % (backend, e))
|
| 55 |
+
return backend_cls(location, params)
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
class CacheHandler:
|
| 59 |
+
"""
|
| 60 |
+
A Cache Handler to manage access to Cache instances.
|
| 61 |
+
|
| 62 |
+
Ensure only one instance of each alias exists per thread.
|
| 63 |
+
"""
|
| 64 |
+
def __init__(self):
|
| 65 |
+
self._caches = local()
|
| 66 |
+
|
| 67 |
+
def __getitem__(self, alias):
|
| 68 |
+
try:
|
| 69 |
+
return self._caches.caches[alias]
|
| 70 |
+
except AttributeError:
|
| 71 |
+
self._caches.caches = {}
|
| 72 |
+
except KeyError:
|
| 73 |
+
pass
|
| 74 |
+
|
| 75 |
+
if alias not in settings.CACHES:
|
| 76 |
+
raise InvalidCacheBackendError(
|
| 77 |
+
"Could not find config for '%s' in settings.CACHES" % alias
|
| 78 |
+
)
|
| 79 |
+
|
| 80 |
+
cache = _create_cache(alias)
|
| 81 |
+
self._caches.caches[alias] = cache
|
| 82 |
+
return cache
|
| 83 |
+
|
| 84 |
+
def all(self):
|
| 85 |
+
return getattr(self._caches, 'caches', {}).values()
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
caches = CacheHandler()
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class DefaultCacheProxy:
|
| 92 |
+
"""
|
| 93 |
+
Proxy access to the default Cache object's attributes.
|
| 94 |
+
|
| 95 |
+
This allows the legacy `cache` object to be thread-safe using the new
|
| 96 |
+
``caches`` API.
|
| 97 |
+
"""
|
| 98 |
+
def __getattr__(self, name):
|
| 99 |
+
return getattr(caches[DEFAULT_CACHE_ALIAS], name)
|
| 100 |
+
|
| 101 |
+
def __setattr__(self, name, value):
|
| 102 |
+
return setattr(caches[DEFAULT_CACHE_ALIAS], name, value)
|
| 103 |
+
|
| 104 |
+
def __delattr__(self, name):
|
| 105 |
+
return delattr(caches[DEFAULT_CACHE_ALIAS], name)
|
| 106 |
+
|
| 107 |
+
def __contains__(self, key):
|
| 108 |
+
return key in caches[DEFAULT_CACHE_ALIAS]
|
| 109 |
+
|
| 110 |
+
def __eq__(self, other):
|
| 111 |
+
return caches[DEFAULT_CACHE_ALIAS] == other
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
cache = DefaultCacheProxy()
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
def close_caches(**kwargs):
|
| 118 |
+
# Some caches -- python-memcached in particular -- need to do a cleanup at the
|
| 119 |
+
# end of a request cycle. If not implemented in a particular backend
|
| 120 |
+
# cache.close is a no-op
|
| 121 |
+
for cache in caches.all():
|
| 122 |
+
cache.close()
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
signals.request_finished.connect(close_caches)
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/backends/base.py
ADDED
|
@@ -0,0 +1,276 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"Base Cache class."
|
| 2 |
+
import time
|
| 3 |
+
import warnings
|
| 4 |
+
|
| 5 |
+
from django.core.exceptions import ImproperlyConfigured
|
| 6 |
+
from django.utils.module_loading import import_string
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class InvalidCacheBackendError(ImproperlyConfigured):
    """The configured cache backend is invalid or cannot be loaded."""
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class CacheKeyWarning(RuntimeWarning):
    """Warning issued by validate_key() for keys that may break memcached."""
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
# Sentinel default for `timeout` arguments: lets backends distinguish
# "caller did not pass a timeout" (use the backend's default) from an
# explicit None ("never expire"). Compared with == in
# BaseCache.get_backend_timeout(); for a bare object() that is an
# identity comparison.
DEFAULT_TIMEOUT = object()

# Memcached does not accept keys longer than this.
MEMCACHE_MAX_KEY_LENGTH = 250
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def default_key_func(key, key_prefix, version):
    """
    Build the final cache key as '<key_prefix>:<version>:<key>'.

    This is the default used by get_key_func(); the KEY_FUNCTION setting
    can substitute a custom key-making function.
    """
    template = '%s:%s:%s'
    return template % (key_prefix, version, key)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def get_key_func(key_func):
    """
    Resolve the key-making function for a cache backend.

    Accepts a callable, a dotted import path, or None; None selects
    ``default_key_func``.
    """
    if key_func is None:
        return default_key_func
    # A string is treated as a dotted path to a callable.
    return key_func if callable(key_func) else import_string(key_func)
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class BaseCache:
    """
    Abstract base class for cache backends.

    Defines the cache API. Subclasses must implement the primitives
    add(), get(), set(), delete() and clear(); the remaining methods have
    default implementations built on top of those primitives (and may be
    overridden with more efficient backend-specific versions).
    """

    def __init__(self, params):
        # TIMEOUT may appear under its settings name or lowercase; invalid
        # values silently fall back to 300 seconds. An explicit None is
        # preserved (see get_backend_timeout(): it maps to "no expiry").
        timeout = params.get('timeout', params.get('TIMEOUT', 300))
        if timeout is not None:
            try:
                timeout = int(timeout)
            except (ValueError, TypeError):
                timeout = 300
        self.default_timeout = timeout

        options = params.get('OPTIONS', {})
        # Culling configuration: top-level params win over the OPTIONS
        # dict; non-numeric values fall back to the defaults.
        max_entries = params.get('max_entries', options.get('MAX_ENTRIES', 300))
        try:
            self._max_entries = int(max_entries)
        except (ValueError, TypeError):
            self._max_entries = 300

        cull_frequency = params.get('cull_frequency', options.get('CULL_FREQUENCY', 3))
        try:
            self._cull_frequency = int(cull_frequency)
        except (ValueError, TypeError):
            self._cull_frequency = 3

        self.key_prefix = params.get('KEY_PREFIX', '')
        self.version = params.get('VERSION', 1)
        self.key_func = get_key_func(params.get('KEY_FUNCTION'))

    def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
        """
        Return the timeout value usable by this backend based upon the provided
        timeout.

        Returns an absolute expiry (time.time() + timeout) or None for
        "never expire".
        """
        # == on the DEFAULT_TIMEOUT sentinel is effectively an identity check.
        if timeout == DEFAULT_TIMEOUT:
            timeout = self.default_timeout
        elif timeout == 0:
            # ticket 21147 - avoid time.time() related precision issues
            timeout = -1
        return None if timeout is None else time.time() + timeout

    def make_key(self, key, version=None):
        """
        Construct the key used by all other methods. By default, use the
        key_func to generate a key (which, by default, prepends the
        `key_prefix' and 'version'). A different key function can be provided
        at the time of cache construction; alternatively, you can subclass the
        cache backend to provide custom key making behavior.
        """
        if version is None:
            version = self.version

        new_key = self.key_func(key, self.key_prefix, version)
        return new_key

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Set a value in the cache if the key does not already exist. If
        timeout is given, use that timeout for the key; otherwise use the
        default cache timeout.

        Return True if the value was stored, False otherwise.
        """
        raise NotImplementedError('subclasses of BaseCache must provide an add() method')

    def get(self, key, default=None, version=None):
        """
        Fetch a given key from the cache. If the key does not exist, return
        default, which itself defaults to None.
        """
        raise NotImplementedError('subclasses of BaseCache must provide a get() method')

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Set a value in the cache. If timeout is given, use that timeout for the
        key; otherwise use the default cache timeout.
        """
        raise NotImplementedError('subclasses of BaseCache must provide a set() method')

    def delete(self, key, version=None):
        """
        Delete a key from the cache, failing silently.
        """
        raise NotImplementedError('subclasses of BaseCache must provide a delete() method')

    def get_many(self, keys, version=None):
        """
        Fetch a bunch of keys from the cache. For certain backends (memcached,
        pgsql) this can be *much* faster when fetching multiple values.

        Return a dict mapping each key in keys to its value. If the given
        key is missing, it will be missing from the response dict.

        Note: a key whose stored value is None is indistinguishable from a
        missing key here and is omitted from the result.
        """
        d = {}
        for k in keys:
            val = self.get(k, version=version)
            if val is not None:
                d[k] = val
        return d

    def get_or_set(self, key, default, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Fetch a given key from the cache. If the key does not exist,
        add the key and set it to the default value. The default value can
        also be any callable. If timeout is given, use that timeout for the
        key; otherwise use the default cache timeout.

        Return the value of the key stored or retrieved.
        """
        val = self.get(key, version=version)
        if val is None:
            if callable(default):
                default = default()
            if default is not None:
                self.add(key, default, timeout=timeout, version=version)
                # Fetch the value again to avoid a race condition if another
                # caller added a value between the first get() and the add()
                # above.
                return self.get(key, default, version=version)
        return val

    def has_key(self, key, version=None):
        """
        Return True if the key is in the cache and has not expired.
        """
        return self.get(key, version=version) is not None

    def incr(self, key, delta=1, version=None):
        """
        Add delta to value in the cache. If the key does not exist, raise a
        ValueError exception.

        NOTE(review): this default implementation is a non-atomic
        read-modify-write; backends may override it with an atomic version.
        """
        value = self.get(key, version=version)
        if value is None:
            raise ValueError("Key '%s' not found" % key)
        new_value = value + delta
        self.set(key, new_value, version=version)
        return new_value

    def decr(self, key, delta=1, version=None):
        """
        Subtract delta from value in the cache. If the key does not exist, raise
        a ValueError exception.
        """
        return self.incr(key, -delta, version=version)

    def __contains__(self, key):
        """
        Return True if the key is in the cache and has not expired.
        """
        # This is a separate method, rather than just a copy of has_key(),
        # so that it always has the same functionality as has_key(), even
        # if a subclass overrides it.
        return self.has_key(key)

    def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Set a bunch of values in the cache at once from a dict of key/value
        pairs. For certain backends (memcached), this is much more efficient
        than calling set() multiple times.

        If timeout is given, use that timeout for the key; otherwise use the
        default cache timeout.

        On backends that support it, return a list of keys that failed
        insertion, or an empty list if all keys were inserted successfully.
        """
        for key, value in data.items():
            self.set(key, value, timeout=timeout, version=version)
        return []

    def delete_many(self, keys, version=None):
        """
        Delete a bunch of values in the cache at once. For certain backends
        (memcached), this is much more efficient than calling delete() multiple
        times.
        """
        for key in keys:
            self.delete(key, version=version)

    def clear(self):
        """Remove *all* values from the cache at once."""
        raise NotImplementedError('subclasses of BaseCache must provide a clear() method')

    def validate_key(self, key):
        """
        Warn about keys that would not be portable to the memcached
        backend. This encourages (but does not force) writing backend-portable
        cache code.
        """
        if len(key) > MEMCACHE_MAX_KEY_LENGTH:
            warnings.warn(
                'Cache key will cause errors if used with memcached: %r '
                '(longer than %s)' % (key, MEMCACHE_MAX_KEY_LENGTH), CacheKeyWarning
            )
        # Memcached rejects control characters and spaces; one warning per
        # key is enough, hence the break.
        for char in key:
            if ord(char) < 33 or ord(char) == 127:
                warnings.warn(
                    'Cache key contains characters that will cause errors if '
                    'used with memcached: %r' % key, CacheKeyWarning
                )
                break

    def incr_version(self, key, delta=1, version=None):
        """
        Add delta to the cache version for the supplied key. Return the new
        version.
        """
        if version is None:
            version = self.version

        value = self.get(key, version=version)
        if value is None:
            raise ValueError("Key '%s' not found" % key)

        # Copy under the new version, then drop the old entry.
        self.set(key, value, version=version + delta)
        self.delete(key, version=version)
        return version + delta

    def decr_version(self, key, delta=1, version=None):
        """
        Subtract delta from the cache version for the supplied key. Return the
        new version.
        """
        return self.incr_version(key, -delta, version)

    def close(self, **kwargs):
        """Close the cache connection"""
        pass
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/backends/db.py
ADDED
|
@@ -0,0 +1,208 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"Database cache backend."
|
| 2 |
+
import base64
|
| 3 |
+
import pickle
|
| 4 |
+
from datetime import datetime
|
| 5 |
+
|
| 6 |
+
from django.conf import settings
|
| 7 |
+
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
|
| 8 |
+
from django.db import DatabaseError, connections, models, router, transaction
|
| 9 |
+
from django.utils import timezone
|
| 10 |
+
from django.utils.encoding import force_bytes
|
| 11 |
+
from django.utils.inspect import func_supports_parameter
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class Options:
    """
    Quack like a Django model's ``_meta`` options object.

    Database routers inspect ``_meta`` when deciding which database to
    use; this stand-in lets cache-table operations be routed without a
    real model.
    """

    def __init__(self, table):
        # Identity of the pseudo-model.
        self.db_table = table
        self.app_label = 'django_cache'
        self.model_name = 'cacheentry'
        self.object_name = 'CacheEntry'
        # Human-readable names.
        self.verbose_name = 'cache entry'
        self.verbose_name_plural = 'cache entries'
        # Flags routers may check; a cache table is a plain managed table.
        self.abstract = False
        self.managed = True
        self.proxy = False
        self.swapped = False
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class BaseDatabaseCache(BaseCache):
    """
    Shared plumbing for database-backed caches: remembers the table name
    and builds a stand-in model class so database routers can be
    consulted for cache reads and writes.
    """

    def __init__(self, table, params):
        super().__init__(params)
        self._table = table

        class CacheEntry:
            # Fake model; only its _meta options object is ever inspected.
            _meta = Options(table)

        self.cache_model_class = CacheEntry
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class DatabaseCache(BaseDatabaseCache):
    """
    Cache backend storing entries as rows (cache_key, value, expires) in
    a database table; values are stored pickled and base64-encoded.
    """

    # This class uses cursors provided by the database connection. This means
    # it reads expiration values as aware or naive datetimes, depending on the
    # value of USE_TZ and whether the database supports time zones. The ORM's
    # conversion and adaptation infrastructure is then used to avoid comparing
    # aware and naive datetimes accidentally.

    def get(self, key, default=None, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)
        db = router.db_for_read(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("SELECT cache_key, value, expires FROM %s "
                           "WHERE cache_key = %%s" % table, [key])
            row = cursor.fetchone()
        if row is None:
            return default

        # Run the ORM's datetime converters over the raw 'expires' column so
        # the comparison below never mixes aware and naive datetimes.
        expires = row[2]
        expression = models.Expression(output_field=models.DateTimeField())
        for converter in (connection.ops.get_db_converters(expression) +
                          expression.get_db_converters(connection)):
            if func_supports_parameter(converter, 'context'):  # RemovedInDjango30Warning
                expires = converter(expires, expression, connection, {})
            else:
                expires = converter(expires, expression, connection)

        if expires < timezone.now():
            # Expired: lazily delete the stale row (on the write database)
            # and report a miss.
            db = router.db_for_write(self.cache_model_class)
            connection = connections[db]
            with connection.cursor() as cursor:
                cursor.execute("DELETE FROM %s "
                               "WHERE cache_key = %%s" % table, [key])
            return default

        value = connection.ops.process_clob(row[1])
        return pickle.loads(base64.b64decode(force_bytes(value)))

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)
        self._base_set('set', key, value, timeout)

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)
        return self._base_set('add', key, value, timeout)

    def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
        # Shared implementation for set()/add(); mode is 'set' or 'add'.
        timeout = self.get_backend_timeout(timeout)
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            now = timezone.now()
            now = now.replace(microsecond=0)
            # Map the backend timeout to a column value; None means "never
            # expire", stored as datetime.max.
            if timeout is None:
                exp = datetime.max
            elif settings.USE_TZ:
                exp = datetime.utcfromtimestamp(timeout)
            else:
                exp = datetime.fromtimestamp(timeout)
            exp = exp.replace(microsecond=0)
            if num > self._max_entries:
                self._cull(db, cursor, now)
            pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
            # The DB column is expecting a string, so make sure the value is a
            # string, not bytes. Refs #19274.
            b64encoded = base64.b64encode(pickled).decode('latin1')
            try:
                # Note: typecasting for datetimes is needed by some 3rd party
                # database backends. All core backends work without typecasting,
                # so be careful about changes here - test suite will NOT pick
                # regressions.
                with transaction.atomic(using=db):
                    cursor.execute("SELECT cache_key, expires FROM %s "
                                   "WHERE cache_key = %%s" % table, [key])
                    result = cursor.fetchone()

                    if result:
                        # Convert the existing row's expiry so the 'add'
                        # comparison against `now` is type-consistent.
                        current_expires = result[1]
                        expression = models.Expression(output_field=models.DateTimeField())
                        for converter in (connection.ops.get_db_converters(expression) +
                                          expression.get_db_converters(connection)):
                            if func_supports_parameter(converter, 'context'):  # RemovedInDjango30Warning
                                current_expires = converter(current_expires, expression, connection, {})
                            else:
                                current_expires = converter(current_expires, expression, connection)

                    exp = connection.ops.adapt_datetimefield_value(exp)
                    # 'set' always overwrites; 'add' only replaces a row that
                    # has already expired.
                    if result and (mode == 'set' or (mode == 'add' and current_expires < now)):
                        cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                                       "WHERE cache_key = %%s" % table,
                                       [b64encoded, exp, key])
                    else:
                        cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                                       "VALUES (%%s, %%s, %%s)" % table,
                                       [key, b64encoded, exp])
            except DatabaseError:
                # To be threadsafe, updates/inserts are allowed to fail silently
                return False
            else:
                return True

    def delete(self, key, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)

        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        with connection.cursor() as cursor:
            cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])

    def has_key(self, key, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)

        db = router.db_for_read(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)

        if settings.USE_TZ:
            now = datetime.utcnow()
        else:
            now = datetime.now()
        now = now.replace(microsecond=0)

        with connection.cursor() as cursor:
            cursor.execute("SELECT cache_key FROM %s "
                           "WHERE cache_key = %%s and expires > %%s" % table,
                           [key, connection.ops.adapt_datetimefield_value(now)])
            return cursor.fetchone() is not None

    def _cull(self, db, cursor, now):
        # Make room when MAX_ENTRIES is exceeded; CULL_FREQUENCY == 0 means
        # "drop everything".
        if self._cull_frequency == 0:
            self.clear()
        else:
            connection = connections[db]
            table = connection.ops.quote_name(self._table)
            # First drop expired rows, then, if still over the limit, delete
            # roughly 1/cull_frequency of the entries (by key order).
            cursor.execute("DELETE FROM %s WHERE expires < %%s" % table,
                           [connection.ops.adapt_datetimefield_value(now)])
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            if num > self._max_entries:
                cull_num = num // self._cull_frequency
                cursor.execute(
                    connection.ops.cache_key_culling_sql() % table,
                    [cull_num])
                cursor.execute("DELETE FROM %s "
                               "WHERE cache_key < %%s" % table,
                               [cursor.fetchone()[0]])

    def clear(self):
        db = router.db_for_write(self.cache_model_class)
        connection = connections[db]
        table = connection.ops.quote_name(self._table)
        with connection.cursor() as cursor:
            cursor.execute('DELETE FROM %s' % table)
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/backends/dummy.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"Dummy cache backend"
|
| 2 |
+
|
| 3 |
+
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class DummyCache(BaseCache):
    """
    A cache that caches nothing: writes are accepted and discarded, reads
    always miss. Key construction and validation still run, so malformed
    keys produce the same warnings as with a real backend.
    """

    def __init__(self, host, *args, **kwargs):
        # `host` is accepted for interface parity with real backends and ignored.
        BaseCache.__init__(self, *args, **kwargs)

    def _check_key(self, key, version):
        # Build the full key and validate it purely for its side-effect
        # warnings; the result is deliberately discarded.
        self.validate_key(self.make_key(key, version=version))

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        self._check_key(key, version)
        return True

    def get(self, key, default=None, version=None):
        self._check_key(key, version)
        return default

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        self._check_key(key, version)

    def delete(self, key, version=None):
        self._check_key(key, version)

    def get_many(self, keys, version=None):
        return {}

    def has_key(self, key, version=None):
        self._check_key(key, version)
        return False

    def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
        return []

    def delete_many(self, keys, version=None):
        pass

    def clear(self):
        pass
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/backends/filebased.py
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"File-based cache backend"
|
| 2 |
+
import glob
|
| 3 |
+
import hashlib
|
| 4 |
+
import os
|
| 5 |
+
import pickle
|
| 6 |
+
import random
|
| 7 |
+
import tempfile
|
| 8 |
+
import time
|
| 9 |
+
import zlib
|
| 10 |
+
|
| 11 |
+
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
|
| 12 |
+
from django.core.files.move import file_move_safe
|
| 13 |
+
from django.utils.encoding import force_bytes
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class FileBasedCache(BaseCache):
    """
    Cache backend that stores each entry in its own file under a root
    directory. A file holds a pickled expiry timestamp followed by the
    zlib-compressed pickled value (see set()/_is_expired()).
    """
    cache_suffix = '.djcache'

    def __init__(self, dir, params):
        super().__init__(params)
        self._dir = os.path.abspath(dir)
        self._createdir()

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        # NOTE(review): check-then-set is not atomic; a concurrent writer
        # can win the race between has_key() and set().
        if self.has_key(key, version):
            return False
        self.set(key, value, timeout, version)
        return True

    def get(self, key, default=None, version=None):
        fname = self._key_to_file(key, version)
        try:
            with open(fname, 'rb') as f:
                # _is_expired() also deletes the file when it has expired.
                if not self._is_expired(f):
                    return pickle.loads(zlib.decompress(f.read()))
        except FileNotFoundError:
            pass
        return default

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        self._createdir()  # Cache dir can be deleted at any time.
        fname = self._key_to_file(key, version)
        self._cull()  # make some room if necessary
        # Write to a temp file first, then atomically move into place so
        # readers never see a partially written entry.
        fd, tmp_path = tempfile.mkstemp(dir=self._dir)
        renamed = False
        try:
            with open(fd, 'wb') as f:
                expiry = self.get_backend_timeout(timeout)
                f.write(pickle.dumps(expiry, pickle.HIGHEST_PROTOCOL))
                f.write(zlib.compress(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)))
            file_move_safe(tmp_path, fname, allow_overwrite=True)
            renamed = True
        finally:
            if not renamed:
                os.remove(tmp_path)

    def delete(self, key, version=None):
        self._delete(self._key_to_file(key, version))

    def _delete(self, fname):
        # Only ever remove files that live inside the cache directory.
        if not fname.startswith(self._dir) or not os.path.exists(fname):
            return
        try:
            os.remove(fname)
        except FileNotFoundError:
            # The file may have been removed by another process.
            pass

    def has_key(self, key, version=None):
        fname = self._key_to_file(key, version)
        if os.path.exists(fname):
            with open(fname, 'rb') as f:
                return not self._is_expired(f)
        return False

    def _cull(self):
        """
        Remove random cache entries if max_entries is reached at a ratio
        of num_entries / cull_frequency. A value of 0 for CULL_FREQUENCY means
        that the entire cache will be purged.
        """
        filelist = self._list_cache_files()
        num_entries = len(filelist)
        if num_entries < self._max_entries:
            return  # return early if no culling is required
        if self._cull_frequency == 0:
            return self.clear()  # Clear the cache when CULL_FREQUENCY = 0
        # Delete a random selection of entries
        filelist = random.sample(filelist,
                                 int(num_entries / self._cull_frequency))
        for fname in filelist:
            self._delete(fname)

    def _createdir(self):
        if not os.path.exists(self._dir):
            try:
                os.makedirs(self._dir, 0o700)
            except FileExistsError:
                # Another process created it between the check and makedirs().
                pass

    def _key_to_file(self, key, version=None):
        """
        Convert a key into a cache file path. Basically this is the
        root cache path joined with the md5sum of the key and a suffix.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        return os.path.join(self._dir, ''.join(
            [hashlib.md5(force_bytes(key)).hexdigest(), self.cache_suffix]))

    def clear(self):
        """
        Remove all the cache files.
        """
        if not os.path.exists(self._dir):
            return
        for fname in self._list_cache_files():
            self._delete(fname)

    def _is_expired(self, f):
        """
        Take an open cache file `f` and delete it if it's expired.

        Return True (and close/delete the file) when expired, else False
        leaving `f` positioned just past the pickled expiry header.
        """
        try:
            exp = pickle.load(f)
        except EOFError:
            exp = 0  # An empty file is considered expired.
        if exp is not None and exp < time.time():
            f.close()  # On Windows a file has to be closed before deleting
            self._delete(f.name)
            return True
        return False

    def _list_cache_files(self):
        """
        Get a list of paths to all the cache files. These are all the files
        in the root cache dir that end on the cache_suffix.
        """
        if not os.path.exists(self._dir):
            return []
        filelist = [os.path.join(self._dir, fname) for fname
                    in glob.glob1(self._dir, '*%s' % self.cache_suffix)]
        return filelist
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/backends/locmem.py
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"Thread-safe in-memory cache backend."
|
| 2 |
+
import pickle
|
| 3 |
+
import time
|
| 4 |
+
from contextlib import contextmanager
|
| 5 |
+
|
| 6 |
+
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
|
| 7 |
+
from django.utils.synch import RWLock
|
| 8 |
+
|
| 9 |
+
# Global in-memory store of cache data. Keyed by name, to provide
# multiple named local memory caches.
_caches = {}        # name -> {full cache key: pickled value}
_expire_info = {}   # name -> {full cache key: expiry timestamp or None}
_locks = {}         # name -> RWLock guarding the two dicts above
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@contextmanager
def dummy():
    """Do-nothing context manager, used where a real lock is optional."""
    yield None
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class LocMemCache(BaseCache):
    """Per-process, in-memory cache backend.

    All instances created with the same ``name`` share one store, one expiry
    map and one reader/writer lock (the module-level dicts above), so the
    backend is thread-safe within a process. Values are pickled on the way in
    and unpickled on the way out, so cached objects behave like copies.
    """

    def __init__(self, name, params):
        BaseCache.__init__(self, params)
        # setdefault() makes same-named caches share state across instances.
        self._cache = _caches.setdefault(name, {})
        self._expire_info = _expire_info.setdefault(name, {})
        self._lock = _locks.setdefault(name, RWLock())

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        # Store only if the key is absent or expired; return whether it was set.
        key = self.make_key(key, version=version)
        self.validate_key(key)
        pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
        with self._lock.writer():
            if self._has_expired(key):
                self._set(key, pickled, timeout)
                return True
            return False

    def get(self, key, default=None, version=None, acquire_lock=True):
        # acquire_lock=False is used by incr(), which already holds the writer
        # lock; taking the reader lock again here could deadlock.
        key = self.make_key(key, version=version)
        self.validate_key(key)
        pickled = None
        with (self._lock.reader() if acquire_lock else dummy()):
            if not self._has_expired(key):
                pickled = self._cache[key]
        if pickled is not None:
            try:
                return pickle.loads(pickled)
            except pickle.PickleError:
                return default

        # Key missing or expired: purge the stale entry under the writer lock.
        with (self._lock.writer() if acquire_lock else dummy()):
            try:
                del self._cache[key]
                del self._expire_info[key]
            except KeyError:
                pass
            return default

    def _set(self, key, value, timeout=DEFAULT_TIMEOUT):
        # Internal helper; caller must already hold the writer lock.
        if len(self._cache) >= self._max_entries:
            self._cull()
        self._cache[key] = value
        self._expire_info[key] = self.get_backend_timeout(timeout)

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)
        pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
        with self._lock.writer():
            self._set(key, pickled, timeout)

    def incr(self, key, delta=1, version=None):
        # The whole read-modify-write happens under one writer lock so that
        # concurrent increments can't lose updates.
        with self._lock.writer():
            value = self.get(key, version=version, acquire_lock=False)
            if value is None:
                raise ValueError("Key '%s' not found" % key)
            new_value = value + delta
            key = self.make_key(key, version=version)
            pickled = pickle.dumps(new_value, pickle.HIGHEST_PROTOCOL)
            self._cache[key] = pickled
        return new_value

    def has_key(self, key, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)
        with self._lock.reader():
            if not self._has_expired(key):
                return True

        # Expired (or absent): drop any stale entry before reporting False.
        with self._lock.writer():
            try:
                del self._cache[key]
                del self._expire_info[key]
            except KeyError:
                pass
            return False

    def _has_expired(self, key):
        # -1 default means "absent", which also reads as expired; None stored
        # in _expire_info means "never expires".
        exp = self._expire_info.get(key, -1)
        if exp is None or exp > time.time():
            return False
        return True

    def _cull(self):
        # Evict entries when _max_entries is reached. A cull frequency of 0
        # means "clear everything"; otherwise drop every Nth key.
        if self._cull_frequency == 0:
            self.clear()
        else:
            doomed = [k for (i, k) in enumerate(self._cache) if i % self._cull_frequency == 0]
            for k in doomed:
                self._delete(k)

    def _delete(self, key):
        # Best-effort removal from both maps; caller holds the writer lock.
        try:
            del self._cache[key]
        except KeyError:
            pass
        try:
            del self._expire_info[key]
        except KeyError:
            pass

    def delete(self, key, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)
        with self._lock.writer():
            self._delete(key)

    def clear(self):
        # NOTE(review): not taken under the lock in the original; preserved
        # byte-for-byte — concurrent readers may observe a partial clear.
        self._cache.clear()
        self._expire_info.clear()
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/backends/memcached.py
ADDED
|
@@ -0,0 +1,200 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"Memcached cache backend"
|
| 2 |
+
|
| 3 |
+
import pickle
|
| 4 |
+
import re
|
| 5 |
+
import time
|
| 6 |
+
import warnings
|
| 7 |
+
|
| 8 |
+
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
|
| 9 |
+
from django.utils.deprecation import RemovedInDjango21Warning
|
| 10 |
+
from django.utils.functional import cached_property
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class BaseMemcachedCache(BaseCache):
    """Shared behavior for memcached-style backends.

    Subclasses supply the concrete client ``library`` and the exception that
    library raises for a missing key on incr/decr.
    """

    def __init__(self, server, params, library, value_not_found_exception):
        super().__init__(params)
        # Accept either a pre-built list of servers or a single string with
        # servers separated by ';' or ','.
        if isinstance(server, str):
            self._servers = re.split('[;,]', server)
        else:
            self._servers = server

        # The exception type to catch from the underlying library for a key
        # that was not found. This is a ValueError for python-memcache,
        # pylibmc.NotFound for pylibmc, and cmemcache will return None without
        # raising an exception.
        self.LibraryValueNotFoundException = value_not_found_exception

        self._lib = library
        self._options = params.get('OPTIONS') or {}

    @property
    def _cache(self):
        """
        Implement transparent thread-safe access to a memcached client.
        """
        # Create the client lazily on first access and memoize it.
        if getattr(self, '_client', None) is None:
            self._client = self._lib.Client(self._servers, **self._options)

        return self._client

    def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
        """
        Memcached deals with long (> 30 days) timeouts in a special
        way. Call this function to obtain a safe value for your timeout.
        """
        if timeout == DEFAULT_TIMEOUT:
            timeout = self.default_timeout

        if timeout is None:
            # Using 0 in memcache sets a non-expiring timeout.
            return 0
        elif int(timeout) == 0:
            # Other cache backends treat 0 as set-and-expire. To achieve this
            # in memcache backends, a negative timeout must be passed.
            timeout = -1

        if timeout > 2592000:  # 60*60*24*30, 30 days
            # See https://github.com/memcached/memcached/wiki/Programming#expiration
            # "Expiration times can be set from 0, meaning "never expire", to
            # 30 days. Any time higher than 30 days is interpreted as a Unix
            # timestamp date. If you want to expire an object on January 1st of
            # next year, this is how you do that."
            #
            # This means that we have to switch to absolute timestamps.
            timeout += int(time.time())
        return int(timeout)

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        # Returns the library's result: truthy only if the key was absent and
        # the value has now been stored.
        key = self.make_key(key, version=version)
        return self._cache.add(key, value, self.get_backend_timeout(timeout))

    def get(self, key, default=None, version=None):
        key = self.make_key(key, version=version)
        val = self._cache.get(key)
        if val is None:
            return default
        return val

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        key = self.make_key(key, version=version)
        if not self._cache.set(key, value, self.get_backend_timeout(timeout)):
            # make sure the key doesn't keep its old value in case of failure to set (memcached's 1MB limit)
            self._cache.delete(key)

    def delete(self, key, version=None):
        key = self.make_key(key, version=version)
        self._cache.delete(key)

    def get_many(self, keys, version=None):
        # Query with versioned keys, then translate the results back to the
        # original keys the caller supplied.
        new_keys = [self.make_key(x, version=version) for x in keys]
        ret = self._cache.get_multi(new_keys)
        if ret:
            m = dict(zip(new_keys, keys))
            return {m[k]: v for k, v in ret.items()}
        return ret

    def close(self, **kwargs):
        # Many clients don't clean up connections properly.
        self._cache.disconnect_all()

    def incr(self, key, delta=1, version=None):
        key = self.make_key(key, version=version)
        # memcached doesn't support a negative delta
        if delta < 0:
            return self._cache.decr(key, -delta)
        try:
            val = self._cache.incr(key, delta)

        # python-memcache responds to incr on nonexistent keys by
        # raising a ValueError, pylibmc by raising a pylibmc.NotFound
        # and Cmemcache returns None. In all cases,
        # we should raise a ValueError though.
        except self.LibraryValueNotFoundException:
            val = None
        if val is None:
            raise ValueError("Key '%s' not found" % key)
        return val

    def decr(self, key, delta=1, version=None):
        key = self.make_key(key, version=version)
        # memcached doesn't support a negative delta
        if delta < 0:
            return self._cache.incr(key, -delta)
        try:
            val = self._cache.decr(key, delta)

        # python-memcache responds to incr on nonexistent keys by
        # raising a ValueError, pylibmc by raising a pylibmc.NotFound
        # and Cmemcache returns None. In all cases,
        # we should raise a ValueError though.
        except self.LibraryValueNotFoundException:
            val = None
        if val is None:
            raise ValueError("Key '%s' not found" % key)
        return val

    def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
        # Returns the list of original (unversioned) keys that failed to set.
        safe_data = {}
        original_keys = {}
        for key, value in data.items():
            safe_key = self.make_key(key, version=version)
            safe_data[safe_key] = value
            original_keys[safe_key] = key
        failed_keys = self._cache.set_multi(safe_data, self.get_backend_timeout(timeout))
        return [original_keys[k] for k in failed_keys]

    def delete_many(self, keys, version=None):
        self._cache.delete_multi(self.make_key(key, version=version) for key in keys)

    def clear(self):
        self._cache.flush_all()
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
class MemcachedCache(BaseMemcachedCache):
    """Cache binding for the python-memcached client library."""

    def __init__(self, server, params):
        # python-memcached signals a missing key on incr/decr with ValueError.
        import memcache
        super().__init__(server, params, library=memcache, value_not_found_exception=ValueError)

    @property
    def _cache(self):
        # Build the client lazily on first access and memoize it. OPTIONS
        # entries are merged last so they may override the pickle protocol.
        if getattr(self, '_client', None) is None:
            options = {'pickleProtocol': pickle.HIGHEST_PROTOCOL, **self._options}
            self._client = self._lib.Client(self._servers, **options)
        return self._client
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
class PyLibMCCache(BaseMemcachedCache):
    """An implementation of a cache binding using pylibmc"""
    def __init__(self, server, params):
        import pylibmc
        super().__init__(server, params, library=pylibmc, value_not_found_exception=pylibmc.NotFound)

        # The contents of `OPTIONS` was formerly only used to set the behaviors
        # attribute, but is now passed directly to the Client constructor. As such,
        # any options that don't match a valid keyword argument are removed and set
        # under the `behaviors` key instead, to maintain backwards compatibility.
        legacy_behaviors = {}
        for option in list(self._options):
            if option not in ('behaviors', 'binary', 'username', 'password'):
                # Each legacy option triggers one deprecation warning before
                # being moved under 'behaviors'.
                warnings.warn(
                    "Specifying pylibmc cache behaviors as a top-level property "
                    "within `OPTIONS` is deprecated. Move `%s` into a dict named "
                    "`behaviors` inside `OPTIONS` instead." % option,
                    RemovedInDjango21Warning,
                    stacklevel=2,
                )
                legacy_behaviors[option] = self._options.pop(option)

        if legacy_behaviors:
            self._options.setdefault('behaviors', {}).update(legacy_behaviors)

    @cached_property
    def _cache(self):
        # One client per backend instance, created on first access and cached
        # for the life of the instance.
        return self._lib.Client(self._servers, **self._options)

    def close(self, **kwargs):
        # libmemcached manages its own connections. Don't call disconnect_all()
        # as it resets the failover state and creates unnecessary reconnects.
        pass
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/cache/utils.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import hashlib
|
| 2 |
+
from urllib.parse import quote
|
| 3 |
+
|
| 4 |
+
from django.utils.encoding import force_bytes
|
| 5 |
+
|
| 6 |
+
TEMPLATE_FRAGMENT_KEY_TEMPLATE = 'template.cache.%s.%s'


def make_template_fragment_key(fragment_name, vary_on=None):
    """Build the cache key for a cached template fragment.

    Each vary_on value is stringified and URL-quoted, the values are joined
    with ':' and the result is MD5-hashed so the final key stays short no
    matter how many values were supplied.
    """
    vary_on = () if vary_on is None else vary_on
    quoted = [quote(str(var)) for var in vary_on]
    digest = hashlib.md5(force_bytes(':'.join(quoted)))
    return TEMPLATE_FRAGMENT_KEY_TEMPLATE % (fragment_name, digest.hexdigest())
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/__init__.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .messages import (
|
| 2 |
+
CRITICAL, DEBUG, ERROR, INFO, WARNING, CheckMessage, Critical, Debug,
|
| 3 |
+
Error, Info, Warning,
|
| 4 |
+
)
|
| 5 |
+
from .registry import Tags, register, run_checks, tag_exists
|
| 6 |
+
|
| 7 |
+
# Import these to force registration of checks
|
| 8 |
+
import django.core.checks.caches # NOQA isort:skip
|
| 9 |
+
import django.core.checks.database # NOQA isort:skip
|
| 10 |
+
import django.core.checks.model_checks # NOQA isort:skip
|
| 11 |
+
import django.core.checks.security.base # NOQA isort:skip
|
| 12 |
+
import django.core.checks.security.csrf # NOQA isort:skip
|
| 13 |
+
import django.core.checks.security.sessions # NOQA isort:skip
|
| 14 |
+
import django.core.checks.templates # NOQA isort:skip
|
| 15 |
+
import django.core.checks.urls # NOQA isort:skip
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
# Public API of django.core.checks; anything else is an implementation detail.
__all__ = [
    'CheckMessage',
    'Debug', 'Info', 'Warning', 'Error', 'Critical',
    'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL',
    'register', 'run_checks', 'tag_exists', 'Tags',
]
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/caches.py
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.conf import settings
|
| 2 |
+
from django.core.cache import DEFAULT_CACHE_ALIAS
|
| 3 |
+
|
| 4 |
+
from . import Error, Tags, register
|
| 5 |
+
|
| 6 |
+
# Reused, pre-built error message for the missing-default-cache case.
E001 = Error(
    "You must define a '%s' cache in your CACHES setting." % DEFAULT_CACHE_ALIAS,
    id='caches.E001',
)


@register(Tags.caches)
def check_default_cache_is_configured(app_configs, **kwargs):
    """System check: the default cache alias must appear in settings.CACHES."""
    default_configured = DEFAULT_CACHE_ALIAS in settings.CACHES
    return [] if default_configured else [E001]
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/database.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.db import connections
|
| 2 |
+
|
| 3 |
+
from . import Tags, register
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
@register(Tags.database)
def check_database_backends(*args, **kwargs):
    """Collect backend-specific validation issues from every configured connection."""
    return [
        issue
        for conn in connections.all()
        for issue in conn.validation.check(**kwargs)
    ]
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/messages.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Severity levels, modeled after the logging module's numeric levels.
DEBUG = 10
INFO = 20
WARNING = 30
ERROR = 40
CRITICAL = 50


class CheckMessage:
    """One message emitted by a system check.

    Carries a numeric severity ``level`` (see the constants above), the
    message text, an optional hint, the object the message concerns, and an
    optional stable identifier such as ``'models.E020'``.
    """

    def __init__(self, level, msg, hint=None, obj=None, id=None):
        assert isinstance(level, int), "The first argument should be level."
        self.level = level
        self.msg = msg
        self.hint = hint
        self.obj = obj
        self.id = id

    def __eq__(self, other):
        # Equal when the other object is an instance of our (sub)class and
        # every attribute matches.
        if not isinstance(other, self.__class__):
            return False
        attrs = ['level', 'msg', 'hint', 'obj', 'id']
        return all(getattr(self, name) == getattr(other, name) for name in attrs)

    def __str__(self):
        from django.db import models

        # We need to hardcode ModelBase and Field cases because its __str__
        # method doesn't return "applabel.modellabel" and cannot be changed.
        if self.obj is None:
            obj = "?"
        elif isinstance(self.obj, models.base.ModelBase):
            obj = self.obj._meta.label
        else:
            obj = str(self.obj)
        id_part = "(%s) " % self.id if self.id else ""
        hint_part = "\n\tHINT: %s" % self.hint if self.hint else ''
        return "%s: %s%s%s" % (obj, id_part, self.msg, hint_part)

    def __repr__(self):
        return "<%s: level=%r, msg=%r, hint=%r, obj=%r, id=%r>" % (
            self.__class__.__name__, self.level, self.msg, self.hint, self.obj, self.id,
        )

    def is_serious(self, level=ERROR):
        """Return True when this message is at or above `level` (default ERROR)."""
        return self.level >= level

    def is_silenced(self):
        # Deferred import: settings may not be configured at module load time.
        from django.conf import settings
        return self.id in settings.SILENCED_SYSTEM_CHECKS
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class Debug(CheckMessage):
    # Convenience subclass: a CheckMessage pre-bound to the DEBUG level.
    def __init__(self, *args, **kwargs):
        super().__init__(DEBUG, *args, **kwargs)


class Info(CheckMessage):
    # Convenience subclass: a CheckMessage pre-bound to the INFO level.
    def __init__(self, *args, **kwargs):
        super().__init__(INFO, *args, **kwargs)


class Warning(CheckMessage):
    # Convenience subclass pre-bound to WARNING. NOTE: intentionally shadows
    # the builtin Warning inside this module's namespace.
    def __init__(self, *args, **kwargs):
        super().__init__(WARNING, *args, **kwargs)


class Error(CheckMessage):
    # Convenience subclass: a CheckMessage pre-bound to the ERROR level.
    def __init__(self, *args, **kwargs):
        super().__init__(ERROR, *args, **kwargs)


class Critical(CheckMessage):
    # Convenience subclass: a CheckMessage pre-bound to the CRITICAL level.
    def __init__(self, *args, **kwargs):
        super().__init__(CRITICAL, *args, **kwargs)
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/model_checks.py
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import inspect
|
| 2 |
+
import types
|
| 3 |
+
from itertools import chain
|
| 4 |
+
|
| 5 |
+
from django.apps import apps
|
| 6 |
+
from django.core.checks import Error, Tags, register
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@register(Tags.models)
def check_all_models(app_configs=None, **kwargs):
    """Run ``Model.check()`` on every model (or only those belonging to
    app_configs, when given) and collect the resulting messages."""
    if app_configs is None:
        models = apps.get_models()
    else:
        models = chain.from_iterable(app_config.get_models() for app_config in app_configs)

    errors = []
    for model in models:
        # check() must still be the bound classmethod; if a model replaced it
        # with something else, report that instead of trying to call it.
        if inspect.ismethod(model.check):
            errors.extend(model.check(**kwargs))
        else:
            errors.append(
                Error(
                    "The '%s.check()' class method is currently overridden by %r."
                    % (model.__name__, model.check),
                    obj=model,
                    id='models.E020'
                )
            )
    return errors
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def _check_lazy_references(apps, ignore=None):
    """
    Ensure all lazy (i.e. string) model references have been resolved.

    Lazy references are used in various places throughout Django, primarily in
    related fields and model signals. Identify those common cases and provide
    more helpful error messages for them.

    The ignore parameter is used by StateApps to exclude swappable models from
    this check.
    """
    # Anything still in _pending_operations waited on a model that never
    # materialized; each such (app_label, modelname) pair becomes an error.
    pending_models = set(apps._pending_operations) - (ignore or set())

    # Short circuit if there aren't any errors.
    if not pending_models:
        return []

    from django.db.models import signals
    # Reverse map signal object -> its public name, for readable messages.
    model_signals = {
        signal: name for name, signal in vars(signals).items()
        if isinstance(signal, signals.ModelSignal)
    }

    def extract_operation(obj):
        """
        Take a callable found in Apps._pending_operations and identify the
        original callable passed to Apps.lazy_model_operation(). If that
        callable was a partial, return the inner, non-partial function and
        any arguments and keyword arguments that were supplied with it.

        obj is a callback defined locally in Apps.lazy_model_operation() and
        annotated there with a `func` attribute so as to imitate a partial.
        """
        operation, args, keywords = obj, [], {}
        while hasattr(operation, 'func'):
            # The or clauses are redundant but work around a bug (#25945) in
            # functools.partial in Python <= 3.5.1.
            args.extend(getattr(operation, 'args', []) or [])
            keywords.update(getattr(operation, 'keywords', {}) or {})
            operation = operation.func
        return operation, args, keywords

    def app_model_error(model_key):
        # Distinguish "app exists but lacks the model" from "app not installed".
        try:
            apps.get_app_config(model_key[0])
            model_error = "app '%s' doesn't provide model '%s'" % model_key
        except LookupError:
            model_error = "app '%s' isn't installed" % model_key[0]
        return model_error

    # Here are several functions which return CheckMessage instances for the
    # most common usages of lazy operations throughout Django. These functions
    # take the model that was being waited on as an (app_label, modelname)
    # pair, the original lazy function, and its positional and keyword args as
    # determined by extract_operation().

    def field_error(model_key, func, args, keywords):
        error_msg = (
            "The field %(field)s was declared with a lazy reference "
            "to '%(model)s', but %(model_error)s."
        )
        params = {
            'model': '.'.join(model_key),
            'field': keywords['field'],
            'model_error': app_model_error(model_key),
        }
        return Error(error_msg % params, obj=keywords['field'], id='fields.E307')

    def signal_connect_error(model_key, func, args, keywords):
        error_msg = (
            "%(receiver)s was connected to the '%(signal)s' signal with a "
            "lazy reference to the sender '%(model)s', but %(model_error)s."
        )
        receiver = args[0]
        # The receiver is either a function or an instance of class
        # defining a `__call__` method.
        if isinstance(receiver, types.FunctionType):
            description = "The function '%s'" % receiver.__name__
        elif isinstance(receiver, types.MethodType):
            description = "Bound method '%s.%s'" % (receiver.__self__.__class__.__name__, receiver.__name__)
        else:
            description = "An instance of class '%s'" % receiver.__class__.__name__
        signal_name = model_signals.get(func.__self__, 'unknown')
        params = {
            'model': '.'.join(model_key),
            'receiver': description,
            'signal': signal_name,
            'model_error': app_model_error(model_key),
        }
        return Error(error_msg % params, obj=receiver.__module__, id='signals.E001')

    def default_error(model_key, func, args, keywords):
        # Fallback message for lazy operations we don't specifically recognize.
        error_msg = "%(op)s contains a lazy reference to %(model)s, but %(model_error)s."
        params = {
            'op': func,
            'model': '.'.join(model_key),
            'model_error': app_model_error(model_key),
        }
        return Error(error_msg % params, obj=func, id='models.E022')

    # Maps common uses of lazy operations to corresponding error functions
    # defined above. If a key maps to None, no error will be produced.
    # default_error() will be used for usages that don't appear in this dict.
    known_lazy = {
        ('django.db.models.fields.related', 'resolve_related_class'): field_error,
        ('django.db.models.fields.related', 'set_managed'): None,
        ('django.dispatch.dispatcher', 'connect'): signal_connect_error,
    }

    def build_error(model_key, func, args, keywords):
        # Dispatch on (module, function name) of the original lazy callable.
        key = (func.__module__, func.__name__)
        error_fn = known_lazy.get(key, default_error)
        return error_fn(model_key, func, args, keywords) if error_fn else None

    # Sort by message text so output is deterministic across runs.
    return sorted(filter(None, (
        build_error(model_key, *extract_operation(func))
        for model_key in pending_models
        for func in apps._pending_operations[model_key]
    )), key=lambda error: error.msg)
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
@register(Tags.models)
def check_lazy_references(app_configs=None, **kwargs):
    # Unresolved lazy references are global state on the apps registry, so
    # per-app filtering doesn't apply: app_configs and kwargs are ignored.
    return _check_lazy_references(apps)
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/registry.py
ADDED
|
@@ -0,0 +1,98 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import chain
|
| 2 |
+
|
| 3 |
+
from django.utils.itercompat import is_iterable
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class Tags:
    """
    Built-in tags for internal checks.
    """
    # Each attribute is the string label used to select a subset of checks in
    # CheckRegistry.run_checks(tags=...).
    admin = 'admin'
    caches = 'caches'
    compatibility = 'compatibility'
    database = 'database'
    models = 'models'
    security = 'security'
    signals = 'signals'
    templates = 'templates'
    urls = 'urls'
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class CheckRegistry:
|
| 22 |
+
|
| 23 |
+
def __init__(self):
|
| 24 |
+
self.registered_checks = set()
|
| 25 |
+
self.deployment_checks = set()
|
| 26 |
+
|
| 27 |
+
def register(self, check=None, *tags, **kwargs):
|
| 28 |
+
"""
|
| 29 |
+
Can be used as a function or a decorator. Register given function
|
| 30 |
+
`f` labeled with given `tags`. The function should receive **kwargs
|
| 31 |
+
and return list of Errors and Warnings.
|
| 32 |
+
|
| 33 |
+
Example::
|
| 34 |
+
|
| 35 |
+
registry = CheckRegistry()
|
| 36 |
+
@registry.register('mytag', 'anothertag')
|
| 37 |
+
def my_check(apps, **kwargs):
|
| 38 |
+
# ... perform checks and collect `errors` ...
|
| 39 |
+
return errors
|
| 40 |
+
# or
|
| 41 |
+
registry.register(my_check, 'mytag', 'anothertag')
|
| 42 |
+
"""
|
| 43 |
+
kwargs.setdefault('deploy', False)
|
| 44 |
+
|
| 45 |
+
def inner(check):
|
| 46 |
+
check.tags = tags
|
| 47 |
+
checks = self.deployment_checks if kwargs['deploy'] else self.registered_checks
|
| 48 |
+
checks.add(check)
|
| 49 |
+
return check
|
| 50 |
+
|
| 51 |
+
if callable(check):
|
| 52 |
+
return inner(check)
|
| 53 |
+
else:
|
| 54 |
+
if check:
|
| 55 |
+
tags += (check, )
|
| 56 |
+
return inner
|
| 57 |
+
|
| 58 |
+
def run_checks(self, app_configs=None, tags=None, include_deployment_checks=False):
|
| 59 |
+
"""
|
| 60 |
+
Run all registered checks and return list of Errors and Warnings.
|
| 61 |
+
"""
|
| 62 |
+
errors = []
|
| 63 |
+
checks = self.get_checks(include_deployment_checks)
|
| 64 |
+
|
| 65 |
+
if tags is not None:
|
| 66 |
+
checks = [check for check in checks if not set(check.tags).isdisjoint(tags)]
|
| 67 |
+
else:
|
| 68 |
+
# By default, 'database'-tagged checks are not run as they do more
|
| 69 |
+
# than mere static code analysis.
|
| 70 |
+
checks = [check for check in checks if Tags.database not in check.tags]
|
| 71 |
+
|
| 72 |
+
for check in checks:
|
| 73 |
+
new_errors = check(app_configs=app_configs)
|
| 74 |
+
assert is_iterable(new_errors), (
|
| 75 |
+
"The function %r did not return a list. All functions registered "
|
| 76 |
+
"with the checks registry must return a list." % check)
|
| 77 |
+
errors.extend(new_errors)
|
| 78 |
+
return errors
|
| 79 |
+
|
| 80 |
+
def tag_exists(self, tag, include_deployment_checks=False):
    """Return True if some registered check carries ``tag``."""
    available = self.tags_available(include_deployment_checks)
    return tag in available
|
| 82 |
+
|
| 83 |
+
def tags_available(self, deployment_checks=False):
    """Return the set of all tags carried by the selected checks."""
    all_tags = set()
    for chk in self.get_checks(deployment_checks):
        all_tags.update(chk.tags)
    return all_tags
|
| 87 |
+
|
| 88 |
+
def get_checks(self, include_deployment_checks=False):
    """Return regular checks, optionally followed by deployment checks."""
    result = list(self.registered_checks)
    if include_deployment_checks:
        result += list(self.deployment_checks)
    return result
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
# Module-level singleton registry; the aliases below expose its common
# operations as plain functions for convenient importing.
registry = CheckRegistry()
register = registry.register
run_checks = registry.run_checks
tag_exists = registry.tag_exists
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/security/base.py
ADDED
|
@@ -0,0 +1,210 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.conf import settings
|
| 2 |
+
|
| 3 |
+
from .. import Tags, Warning, register
|
| 4 |
+
|
| 5 |
+
SECRET_KEY_MIN_LENGTH = 50
|
| 6 |
+
SECRET_KEY_MIN_UNIQUE_CHARACTERS = 5
|
| 7 |
+
|
| 8 |
+
W001 = Warning(
|
| 9 |
+
"You do not have 'django.middleware.security.SecurityMiddleware' "
|
| 10 |
+
"in your MIDDLEWARE so the SECURE_HSTS_SECONDS, "
|
| 11 |
+
"SECURE_CONTENT_TYPE_NOSNIFF, "
|
| 12 |
+
"SECURE_BROWSER_XSS_FILTER, and SECURE_SSL_REDIRECT settings "
|
| 13 |
+
"will have no effect.",
|
| 14 |
+
id='security.W001',
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
W002 = Warning(
|
| 18 |
+
"You do not have "
|
| 19 |
+
"'django.middleware.clickjacking.XFrameOptionsMiddleware' in your "
|
| 20 |
+
"MIDDLEWARE, so your pages will not be served with an "
|
| 21 |
+
"'x-frame-options' header. Unless there is a good reason for your "
|
| 22 |
+
"site to be served in a frame, you should consider enabling this "
|
| 23 |
+
"header to help prevent clickjacking attacks.",
|
| 24 |
+
id='security.W002',
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
W004 = Warning(
|
| 28 |
+
"You have not set a value for the SECURE_HSTS_SECONDS setting. "
|
| 29 |
+
"If your entire site is served only over SSL, you may want to consider "
|
| 30 |
+
"setting a value and enabling HTTP Strict Transport Security. "
|
| 31 |
+
"Be sure to read the documentation first; enabling HSTS carelessly "
|
| 32 |
+
"can cause serious, irreversible problems.",
|
| 33 |
+
id='security.W004',
|
| 34 |
+
)
|
| 35 |
+
|
| 36 |
+
W005 = Warning(
|
| 37 |
+
"You have not set the SECURE_HSTS_INCLUDE_SUBDOMAINS setting to True. "
|
| 38 |
+
"Without this, your site is potentially vulnerable to attack "
|
| 39 |
+
"via an insecure connection to a subdomain. Only set this to True if "
|
| 40 |
+
"you are certain that all subdomains of your domain should be served "
|
| 41 |
+
"exclusively via SSL.",
|
| 42 |
+
id='security.W005',
|
| 43 |
+
)
|
| 44 |
+
|
| 45 |
+
W006 = Warning(
|
| 46 |
+
"Your SECURE_CONTENT_TYPE_NOSNIFF setting is not set to True, "
|
| 47 |
+
"so your pages will not be served with an "
|
| 48 |
+
"'x-content-type-options: nosniff' header. "
|
| 49 |
+
"You should consider enabling this header to prevent the "
|
| 50 |
+
"browser from identifying content types incorrectly.",
|
| 51 |
+
id='security.W006',
|
| 52 |
+
)
|
| 53 |
+
|
| 54 |
+
W007 = Warning(
|
| 55 |
+
"Your SECURE_BROWSER_XSS_FILTER setting is not set to True, "
|
| 56 |
+
"so your pages will not be served with an "
|
| 57 |
+
"'x-xss-protection: 1; mode=block' header. "
|
| 58 |
+
"You should consider enabling this header to activate the "
|
| 59 |
+
"browser's XSS filtering and help prevent XSS attacks.",
|
| 60 |
+
id='security.W007',
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
+
W008 = Warning(
|
| 64 |
+
"Your SECURE_SSL_REDIRECT setting is not set to True. "
|
| 65 |
+
"Unless your site should be available over both SSL and non-SSL "
|
| 66 |
+
"connections, you may want to either set this setting True "
|
| 67 |
+
"or configure a load balancer or reverse-proxy server "
|
| 68 |
+
"to redirect all connections to HTTPS.",
|
| 69 |
+
id='security.W008',
|
| 70 |
+
)
|
| 71 |
+
|
| 72 |
+
W009 = Warning(
|
| 73 |
+
"Your SECRET_KEY has less than %(min_length)s characters or less than "
|
| 74 |
+
"%(min_unique_chars)s unique characters. Please generate a long and random "
|
| 75 |
+
"SECRET_KEY, otherwise many of Django's security-critical features will be "
|
| 76 |
+
"vulnerable to attack." % {
|
| 77 |
+
'min_length': SECRET_KEY_MIN_LENGTH,
|
| 78 |
+
'min_unique_chars': SECRET_KEY_MIN_UNIQUE_CHARACTERS,
|
| 79 |
+
},
|
| 80 |
+
id='security.W009',
|
| 81 |
+
)
|
| 82 |
+
|
| 83 |
+
W018 = Warning(
|
| 84 |
+
"You should not have DEBUG set to True in deployment.",
|
| 85 |
+
id='security.W018',
|
| 86 |
+
)
|
| 87 |
+
|
| 88 |
+
W019 = Warning(
|
| 89 |
+
"You have "
|
| 90 |
+
"'django.middleware.clickjacking.XFrameOptionsMiddleware' in your "
|
| 91 |
+
"MIDDLEWARE, but X_FRAME_OPTIONS is not set to 'DENY'. "
|
| 92 |
+
"The default is 'SAMEORIGIN', but unless there is a good reason for "
|
| 93 |
+
"your site to serve other parts of itself in a frame, you should "
|
| 94 |
+
"change it to 'DENY'.",
|
| 95 |
+
id='security.W019',
|
| 96 |
+
)
|
| 97 |
+
|
| 98 |
+
W020 = Warning(
|
| 99 |
+
"ALLOWED_HOSTS must not be empty in deployment.",
|
| 100 |
+
id='security.W020',
|
| 101 |
+
)
|
| 102 |
+
|
| 103 |
+
W021 = Warning(
|
| 104 |
+
"You have not set the SECURE_HSTS_PRELOAD setting to True. Without this, "
|
| 105 |
+
"your site cannot be submitted to the browser preload list.",
|
| 106 |
+
id='security.W021',
|
| 107 |
+
)
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def _security_middleware():
    """Return True if SecurityMiddleware is listed in settings.MIDDLEWARE."""
    middleware = 'django.middleware.security.SecurityMiddleware'
    return middleware in settings.MIDDLEWARE
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def _xframe_middleware():
    """Return True if XFrameOptionsMiddleware is listed in settings.MIDDLEWARE."""
    middleware = 'django.middleware.clickjacking.XFrameOptionsMiddleware'
    return middleware in settings.MIDDLEWARE
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
@register(Tags.security, deploy=True)
def check_security_middleware(app_configs, **kwargs):
    """Warn (W001) when SecurityMiddleware is not installed."""
    if _security_middleware():
        return []
    return [W001]
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
@register(Tags.security, deploy=True)
def check_xframe_options_middleware(app_configs, **kwargs):
    """Warn (W002) when the clickjacking middleware is not installed."""
    if _xframe_middleware():
        return []
    return [W002]
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
@register(Tags.security, deploy=True)
def check_sts(app_configs, **kwargs):
    """Warn (W004) when SecurityMiddleware is active but HSTS is unset."""
    if not _security_middleware():
        return []
    return [] if settings.SECURE_HSTS_SECONDS else [W004]
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
@register(Tags.security, deploy=True)
def check_sts_include_subdomains(app_configs, **kwargs):
    """Warn (W005) when HSTS is enabled without covering subdomains."""
    # Only relevant when the middleware is active and HSTS is switched on.
    if not _security_middleware() or not settings.SECURE_HSTS_SECONDS:
        return []
    return [] if settings.SECURE_HSTS_INCLUDE_SUBDOMAINS is True else [W005]
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
@register(Tags.security, deploy=True)
def check_sts_preload(app_configs, **kwargs):
    """Warn (W021) when HSTS is enabled but preload is not requested."""
    # Only relevant when the middleware is active and HSTS is switched on.
    if not _security_middleware() or not settings.SECURE_HSTS_SECONDS:
        return []
    return [] if settings.SECURE_HSTS_PRELOAD is True else [W021]
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
@register(Tags.security, deploy=True)
def check_content_type_nosniff(app_configs, **kwargs):
    """Warn (W006) unless SECURE_CONTENT_TYPE_NOSNIFF is enabled."""
    if not _security_middleware():
        return []
    return [] if settings.SECURE_CONTENT_TYPE_NOSNIFF is True else [W006]
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
@register(Tags.security, deploy=True)
def check_xss_filter(app_configs, **kwargs):
    """Warn (W007) unless SECURE_BROWSER_XSS_FILTER is enabled."""
    if not _security_middleware():
        return []
    return [] if settings.SECURE_BROWSER_XSS_FILTER is True else [W007]
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
@register(Tags.security, deploy=True)
def check_ssl_redirect(app_configs, **kwargs):
    """Warn (W008) unless SECURE_SSL_REDIRECT is enabled."""
    if not _security_middleware():
        return []
    return [] if settings.SECURE_SSL_REDIRECT is True else [W008]
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
@register(Tags.security, deploy=True)
def check_secret_key(app_configs, **kwargs):
    """Warn (W009) when SECRET_KEY is missing, too short, or low-entropy."""
    secret = getattr(settings, 'SECRET_KEY', None)
    acceptable = (
        secret and
        len(set(secret)) >= SECRET_KEY_MIN_UNIQUE_CHARACTERS and
        len(secret) >= SECRET_KEY_MIN_LENGTH
    )
    return [] if acceptable else [W009]
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
@register(Tags.security, deploy=True)
def check_debug(app_configs, **kwargs):
    """Warn (W018) when DEBUG is left on in deployment."""
    return [W018] if settings.DEBUG else []
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
@register(Tags.security, deploy=True)
def check_xframe_deny(app_configs, **kwargs):
    """Warn (W019) when the clickjacking middleware is on but not set to DENY."""
    if not _xframe_middleware():
        return []
    return [] if settings.X_FRAME_OPTIONS == 'DENY' else [W019]
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
@register(Tags.security, deploy=True)
def check_allowed_hosts(app_configs, **kwargs):
    """Warn (W020) when ALLOWED_HOSTS is empty."""
    if settings.ALLOWED_HOSTS:
        return []
    return [W020]
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/security/csrf.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.conf import settings
|
| 2 |
+
|
| 3 |
+
from .. import Tags, Warning, register
|
| 4 |
+
|
| 5 |
+
W003 = Warning(
|
| 6 |
+
"You don't appear to be using Django's built-in "
|
| 7 |
+
"cross-site request forgery protection via the middleware "
|
| 8 |
+
"('django.middleware.csrf.CsrfViewMiddleware' is not in your "
|
| 9 |
+
"MIDDLEWARE). Enabling the middleware is the safest approach "
|
| 10 |
+
"to ensure you don't leave any holes.",
|
| 11 |
+
id='security.W003',
|
| 12 |
+
)
|
| 13 |
+
|
| 14 |
+
W016 = Warning(
|
| 15 |
+
"You have 'django.middleware.csrf.CsrfViewMiddleware' in your "
|
| 16 |
+
"MIDDLEWARE, but you have not set CSRF_COOKIE_SECURE to True. "
|
| 17 |
+
"Using a secure-only CSRF cookie makes it more difficult for network "
|
| 18 |
+
"traffic sniffers to steal the CSRF token.",
|
| 19 |
+
id='security.W016',
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def _csrf_middleware():
    """Return True if CsrfViewMiddleware is listed in settings.MIDDLEWARE."""
    middleware = 'django.middleware.csrf.CsrfViewMiddleware'
    return middleware in settings.MIDDLEWARE
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
@register(Tags.security, deploy=True)
def check_csrf_middleware(app_configs, **kwargs):
    """Warn (W003) when CSRF protection middleware is not installed."""
    if _csrf_middleware():
        return []
    return [W003]
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@register(Tags.security, deploy=True)
def check_csrf_cookie_secure(app_configs, **kwargs):
    """Warn (W016) when the CSRF cookie is not marked secure-only."""
    # Session-backed CSRF tokens have no cookie of their own to secure.
    if settings.CSRF_USE_SESSIONS:
        return []
    if not _csrf_middleware():
        return []
    return [] if settings.CSRF_COOKIE_SECURE else [W016]
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/security/sessions.py
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.conf import settings
|
| 2 |
+
|
| 3 |
+
from .. import Tags, Warning, register
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def add_session_cookie_message(message):
    """Append the shared secure-cookie rationale to ``message``."""
    suffix = (
        " Using a secure-only session cookie makes it more difficult for "
        "network traffic sniffers to hijack user sessions."
    )
    return message + suffix
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
W010 = Warning(
|
| 14 |
+
add_session_cookie_message(
|
| 15 |
+
"You have 'django.contrib.sessions' in your INSTALLED_APPS, "
|
| 16 |
+
"but you have not set SESSION_COOKIE_SECURE to True."
|
| 17 |
+
),
|
| 18 |
+
id='security.W010',
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
W011 = Warning(
|
| 22 |
+
add_session_cookie_message(
|
| 23 |
+
"You have 'django.contrib.sessions.middleware.SessionMiddleware' "
|
| 24 |
+
"in your MIDDLEWARE, but you have not set "
|
| 25 |
+
"SESSION_COOKIE_SECURE to True."
|
| 26 |
+
),
|
| 27 |
+
id='security.W011',
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
W012 = Warning(
|
| 31 |
+
add_session_cookie_message("SESSION_COOKIE_SECURE is not set to True."),
|
| 32 |
+
id='security.W012',
|
| 33 |
+
)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def add_httponly_message(message):
    """Append the shared HttpOnly-cookie rationale to ``message``."""
    suffix = (
        " Using an HttpOnly session cookie makes it more difficult for "
        "cross-site scripting attacks to hijack user sessions."
    )
    return message + suffix
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
W013 = Warning(
|
| 44 |
+
add_httponly_message(
|
| 45 |
+
"You have 'django.contrib.sessions' in your INSTALLED_APPS, "
|
| 46 |
+
"but you have not set SESSION_COOKIE_HTTPONLY to True.",
|
| 47 |
+
),
|
| 48 |
+
id='security.W013',
|
| 49 |
+
)
|
| 50 |
+
|
| 51 |
+
W014 = Warning(
|
| 52 |
+
add_httponly_message(
|
| 53 |
+
"You have 'django.contrib.sessions.middleware.SessionMiddleware' "
|
| 54 |
+
"in your MIDDLEWARE, but you have not set "
|
| 55 |
+
"SESSION_COOKIE_HTTPONLY to True."
|
| 56 |
+
),
|
| 57 |
+
id='security.W014',
|
| 58 |
+
)
|
| 59 |
+
|
| 60 |
+
W015 = Warning(
|
| 61 |
+
add_httponly_message("SESSION_COOKIE_HTTPONLY is not set to True."),
|
| 62 |
+
id='security.W015',
|
| 63 |
+
)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
@register(Tags.security, deploy=True)
def check_session_cookie_secure(app_configs, **kwargs):
    """Warn when SESSION_COOKIE_SECURE is off (W010/W011, or W012 for both)."""
    if settings.SESSION_COOKIE_SECURE:
        return []
    found = []
    if _session_app():
        found.append(W010)
    if _session_middleware():
        found.append(W011)
    # Both the app and the middleware are present: collapse to one warning.
    return [W012] if len(found) > 1 else found
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
@register(Tags.security, deploy=True)
def check_session_cookie_httponly(app_configs, **kwargs):
    """Warn when SESSION_COOKIE_HTTPONLY is off (W013/W014, or W015 for both)."""
    if settings.SESSION_COOKIE_HTTPONLY:
        return []
    found = []
    if _session_app():
        found.append(W013)
    if _session_middleware():
        found.append(W014)
    # Both the app and the middleware are present: collapse to one warning.
    return [W015] if len(found) > 1 else found
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
def _session_middleware():
    """Return True if SessionMiddleware is listed in settings.MIDDLEWARE."""
    middleware = 'django.contrib.sessions.middleware.SessionMiddleware'
    return middleware in settings.MIDDLEWARE
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def _session_app():
    """Return True if the sessions app is listed in INSTALLED_APPS."""
    app_name = "django.contrib.sessions"
    return app_name in settings.INSTALLED_APPS
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/templates.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import copy
|
| 2 |
+
|
| 3 |
+
from django.conf import settings
|
| 4 |
+
|
| 5 |
+
from . import Error, Tags, register
|
| 6 |
+
|
| 7 |
+
E001 = Error(
|
| 8 |
+
"You have 'APP_DIRS': True in your TEMPLATES but also specify 'loaders' "
|
| 9 |
+
"in OPTIONS. Either remove APP_DIRS or remove the 'loaders' option.",
|
| 10 |
+
id='templates.E001',
|
| 11 |
+
)
|
| 12 |
+
E002 = Error(
|
| 13 |
+
"'string_if_invalid' in TEMPLATES OPTIONS must be a string but got: {} ({}).",
|
| 14 |
+
id="templates.E002",
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
@register(Tags.templates)
def check_setting_app_dirs_loaders(app_configs, **kwargs):
    """Error (E001) when a TEMPLATES entry sets both APP_DIRS and 'loaders'."""
    conflict = any(
        conf.get('APP_DIRS') and 'loaders' in conf.get('OPTIONS', {})
        for conf in settings.TEMPLATES
    )
    return [E001] if conflict else []
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
@register(Tags.templates)
def check_string_if_invalid_is_string(app_configs, **kwargs):
    """Error (E002) for each TEMPLATES entry whose string_if_invalid isn't a str."""
    found = []
    for conf in settings.TEMPLATES:
        value = conf.get('OPTIONS', {}).get('string_if_invalid', '')
        if isinstance(value, str):
            continue
        # Copy the template error so the shared E002 constant stays pristine.
        err = copy.copy(E002)
        err.msg = err.msg.format(value, type(value).__name__)
        found.append(err)
    return found
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/checks/urls.py
ADDED
|
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from collections import Counter
|
| 2 |
+
|
| 3 |
+
from django.conf import settings
|
| 4 |
+
|
| 5 |
+
from . import Error, Tags, Warning, register
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
@register(Tags.urls)
def check_url_config(app_configs, **kwargs):
    """Validate the root URL resolver, if a ROOT_URLCONF is configured."""
    if not getattr(settings, 'ROOT_URLCONF', None):
        return []
    from django.urls import get_resolver
    return check_resolver(get_resolver())
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def check_resolver(resolver):
    """
    Recursively check the resolver.

    Objects exposing a ``check()`` method delegate to it; anything without a
    ``resolve`` attribute is reported as an invalid pattern; other objects
    produce no messages.
    """
    checker = getattr(resolver, 'check', None)
    if checker is not None:
        return checker()
    if not hasattr(resolver, 'resolve'):
        return get_warning_for_invalid_pattern(resolver)
    return []
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
@register(Tags.urls)
def check_url_namespaces_unique(app_configs, **kwargs):
    """
    Warn if URL namespaces used in applications aren't unique.
    """
    if not getattr(settings, 'ROOT_URLCONF', None):
        return []

    from django.urls import get_resolver
    namespaces = _load_all_namespaces(get_resolver())
    duplicated = [ns for ns, num in Counter(namespaces).items() if num > 1]
    return [
        Warning(
            "URL namespace '{}' isn't unique. You may not be able to reverse "
            "all URLs in this namespace".format(ns),
            id="urls.W005",
        )
        for ns in duplicated
    ]
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def _load_all_namespaces(resolver, parents=()):
|
| 54 |
+
"""
|
| 55 |
+
Recursively load all namespaces from URL patterns.
|
| 56 |
+
"""
|
| 57 |
+
url_patterns = getattr(resolver, 'url_patterns', [])
|
| 58 |
+
namespaces = [
|
| 59 |
+
':'.join(parents + (url.namespace,)) for url in url_patterns
|
| 60 |
+
if getattr(url, 'namespace', None) is not None
|
| 61 |
+
]
|
| 62 |
+
for pattern in url_patterns:
|
| 63 |
+
namespace = getattr(pattern, 'namespace', None)
|
| 64 |
+
current = parents
|
| 65 |
+
if namespace is not None:
|
| 66 |
+
current += (namespace,)
|
| 67 |
+
namespaces.extend(_load_all_namespaces(pattern, current))
|
| 68 |
+
return namespaces
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def get_warning_for_invalid_pattern(pattern):
    """
    Return a list containing a warning that the pattern is invalid.

    describe_pattern() cannot be used here, because we cannot rely on the
    urlpattern having regex or name attributes.
    """
    if isinstance(pattern, str):
        advice = (
            "Try removing the string '{}'. The list of urlpatterns should not "
            "have a prefix string as the first element.".format(pattern)
        )
    elif isinstance(pattern, tuple):
        advice = "Try using path() instead of a tuple."
    else:
        advice = None

    return [Error(
        "Your URL pattern {!r} is invalid. Ensure that urlpatterns is a list "
        "of path() and/or re_path() instances.".format(pattern),
        hint=advice,
        id="urls.E004",
    )]
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
@register(Tags.urls)
def check_url_settings(app_configs, **kwargs):
    """Error (E006) for STATIC_URL / MEDIA_URL values missing a trailing slash."""
    found = []
    for setting_name in ('STATIC_URL', 'MEDIA_URL'):
        current = getattr(settings, setting_name)
        if current and not current.endswith('/'):
            found.append(E006(setting_name))
    return found
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
def E006(name):
    """Build the urls.E006 error for the given setting name."""
    message = 'The {} setting must end with a slash.'.format(name)
    return Error(message, id='urls.E006')
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/exceptions.py
ADDED
|
@@ -0,0 +1,183 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Global Django exception and warning classes.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class FieldDoesNotExist(Exception):
    """The requested model field does not exist"""
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class AppRegistryNotReady(Exception):
    """The django.apps registry is not populated yet"""
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class ObjectDoesNotExist(Exception):
    """The requested object does not exist"""
    # Opt-in flag read elsewhere (presumably the template engine) to silence
    # this exception during variable resolution -- TODO confirm against callers.
    silent_variable_failure = True
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class MultipleObjectsReturned(Exception):
    """The query returned multiple objects when only one was expected."""
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
# Base class for request-tampering errors; the subclasses below narrow it.
class SuspiciousOperation(Exception):
    """The user did something suspicious"""
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class SuspiciousMultipartForm(SuspiciousOperation):
    """Suspect MIME request in multipart form data"""
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class SuspiciousFileOperation(SuspiciousOperation):
    """A Suspicious filesystem operation was attempted"""
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class DisallowedHost(SuspiciousOperation):
    """HTTP_HOST header contains invalid value"""
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class DisallowedRedirect(SuspiciousOperation):
    """Redirect to scheme not in allowed list"""
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class TooManyFieldsSent(SuspiciousOperation):
    """
    The number of fields in a GET or POST request exceeded
    settings.DATA_UPLOAD_MAX_NUMBER_FIELDS.
    """
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
class RequestDataTooBig(SuspiciousOperation):
    """
    The size of the request (excluding any file uploads) exceeded
    settings.DATA_UPLOAD_MAX_MEMORY_SIZE.
    """
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class PermissionDenied(Exception):
    """The user did not have permission to do that"""
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class ViewDoesNotExist(Exception):
    """The requested view does not exist"""
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class MiddlewareNotUsed(Exception):
    """This middleware is not used in this server configuration"""
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class ImproperlyConfigured(Exception):
    """Django is somehow improperly configured"""
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
class FieldError(Exception):
    """Some kind of problem with a model field."""
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
# Dictionary key under which ValidationError files errors not tied to a field.
NON_FIELD_ERRORS = '__all__'
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class ValidationError(Exception):
    """An error while validating data."""
    def __init__(self, message, code=None, params=None):
        """
        The `message` argument can be a single error, a list of errors, or a
        dictionary that maps field names to lists of errors. What we define as
        an "error" can be either a simple string or an instance of
        ValidationError with its message attribute set, and what we define as
        list or dictionary can be an actual `list` or `dict` or an instance
        of ValidationError with its `error_list` or `error_dict` attribute set.
        """
        super().__init__(message, code, params)

        # Unwrap a ValidationError passed as the message so the branches
        # below only ever see a dict, a list, or a scalar message.
        if isinstance(message, ValidationError):
            if hasattr(message, 'error_dict'):
                message = message.error_dict
            elif not hasattr(message, 'message'):
                message = message.error_list
            else:
                message, code, params = message.message, message.code, message.params

        if isinstance(message, dict):
            # Field -> list-of-errors mapping; every value is normalized to
            # a ValidationError and its error_list is stored.
            self.error_dict = {}
            for field, messages in message.items():
                if not isinstance(messages, ValidationError):
                    messages = ValidationError(messages)
                self.error_dict[field] = messages.error_list

        elif isinstance(message, list):
            self.error_list = []
            for message in message:
                # Normalize plain strings to instances of ValidationError.
                if not isinstance(message, ValidationError):
                    message = ValidationError(message)
                if hasattr(message, 'error_dict'):
                    # Flatten a dict-style error into its per-field lists.
                    self.error_list.extend(sum(message.error_dict.values(), []))
                else:
                    self.error_list.extend(message.error_list)

        else:
            # Scalar message: this instance acts as its own one-element list.
            self.message = message
            self.code = code
            self.params = params
            self.error_list = [self]

    @property
    def message_dict(self):
        """Mapping of field name to rendered messages (dict-style errors only)."""
        # Trigger an AttributeError if this ValidationError
        # doesn't have an error_dict.
        getattr(self, 'error_dict')

        return dict(self)

    @property
    def messages(self):
        """All rendered messages as a flat list, whatever the storage style."""
        if hasattr(self, 'error_dict'):
            return sum(dict(self).values(), [])
        return list(self)

    def update_error_dict(self, error_dict):
        """Merge this error's contents into ``error_dict`` and return it."""
        if hasattr(self, 'error_dict'):
            for field, error_list in self.error_dict.items():
                error_dict.setdefault(field, []).extend(error_list)
        else:
            # Errors with no field go under the NON_FIELD_ERRORS key.
            error_dict.setdefault(NON_FIELD_ERRORS, []).extend(self.error_list)
        return error_dict

    def __iter__(self):
        """Yield (field, messages) pairs for dict errors, else message strings."""
        if hasattr(self, 'error_dict'):
            for field, errors in self.error_dict.items():
                yield field, list(ValidationError(errors))
        else:
            for error in self.error_list:
                message = error.message
                if error.params:
                    # Interpolate params lazily, at render time.
                    message %= error.params
                yield str(message)

    def __str__(self):
        if hasattr(self, 'error_dict'):
            return repr(dict(self))
        return repr(list(self))

    def __repr__(self):
        return 'ValidationError(%s)' % self
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
class EmptyResultSet(Exception):
    """A database query predicate is impossible."""
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.core.files.base import File
|
| 2 |
+
|
| 3 |
+
__all__ = ['File']
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/base.py
ADDED
|
@@ -0,0 +1,169 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from io import BytesIO, StringIO, UnsupportedOperation
|
| 3 |
+
|
| 4 |
+
from django.core.files.utils import FileProxyMixin
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class File(FileProxyMixin):
|
| 8 |
+
DEFAULT_CHUNK_SIZE = 64 * 2 ** 10
|
| 9 |
+
|
| 10 |
+
def __init__(self, file, name=None):
|
| 11 |
+
self.file = file
|
| 12 |
+
if name is None:
|
| 13 |
+
name = getattr(file, 'name', None)
|
| 14 |
+
self.name = name
|
| 15 |
+
if hasattr(file, 'mode'):
|
| 16 |
+
self.mode = file.mode
|
| 17 |
+
|
| 18 |
+
def __str__(self):
|
| 19 |
+
return self.name or ''
|
| 20 |
+
|
| 21 |
+
def __repr__(self):
|
| 22 |
+
return "<%s: %s>" % (self.__class__.__name__, self or "None")
|
| 23 |
+
|
| 24 |
+
def __bool__(self):
|
| 25 |
+
return bool(self.name)
|
| 26 |
+
|
| 27 |
+
def __len__(self):
|
| 28 |
+
return self.size
|
| 29 |
+
|
| 30 |
+
def _get_size_from_underlying_file(self):
|
| 31 |
+
if hasattr(self.file, 'size'):
|
| 32 |
+
return self.file.size
|
| 33 |
+
if hasattr(self.file, 'name'):
|
| 34 |
+
try:
|
| 35 |
+
return os.path.getsize(self.file.name)
|
| 36 |
+
except (OSError, TypeError):
|
| 37 |
+
pass
|
| 38 |
+
if hasattr(self.file, 'tell') and hasattr(self.file, 'seek'):
|
| 39 |
+
pos = self.file.tell()
|
| 40 |
+
self.file.seek(0, os.SEEK_END)
|
| 41 |
+
size = self.file.tell()
|
| 42 |
+
self.file.seek(pos)
|
| 43 |
+
return size
|
| 44 |
+
raise AttributeError("Unable to determine the file's size.")
|
| 45 |
+
|
| 46 |
+
def _get_size(self):
|
| 47 |
+
if hasattr(self, '_size'):
|
| 48 |
+
return self._size
|
| 49 |
+
self._size = self._get_size_from_underlying_file()
|
| 50 |
+
return self._size
|
| 51 |
+
|
| 52 |
+
def _set_size(self, size):
|
| 53 |
+
self._size = size
|
| 54 |
+
|
| 55 |
+
size = property(_get_size, _set_size)
|
| 56 |
+
|
| 57 |
+
def chunks(self, chunk_size=None):
|
| 58 |
+
"""
|
| 59 |
+
Read the file and yield chunks of ``chunk_size`` bytes (defaults to
|
| 60 |
+
``UploadedFile.DEFAULT_CHUNK_SIZE``).
|
| 61 |
+
"""
|
| 62 |
+
if not chunk_size:
|
| 63 |
+
chunk_size = self.DEFAULT_CHUNK_SIZE
|
| 64 |
+
|
| 65 |
+
try:
|
| 66 |
+
self.seek(0)
|
| 67 |
+
except (AttributeError, UnsupportedOperation):
|
| 68 |
+
pass
|
| 69 |
+
|
| 70 |
+
while True:
|
| 71 |
+
data = self.read(chunk_size)
|
| 72 |
+
if not data:
|
| 73 |
+
break
|
| 74 |
+
yield data
|
| 75 |
+
|
| 76 |
+
def multiple_chunks(self, chunk_size=None):
|
| 77 |
+
"""
|
| 78 |
+
Return ``True`` if you can expect multiple chunks.
|
| 79 |
+
|
| 80 |
+
NB: If a particular file representation is in memory, subclasses should
|
| 81 |
+
always return ``False`` -- there's no good reason to read from memory in
|
| 82 |
+
chunks.
|
| 83 |
+
"""
|
| 84 |
+
if not chunk_size:
|
| 85 |
+
chunk_size = self.DEFAULT_CHUNK_SIZE
|
| 86 |
+
return self.size > chunk_size
|
| 87 |
+
|
| 88 |
+
def __iter__(self):
|
| 89 |
+
# Iterate over this file-like object by newlines
|
| 90 |
+
buffer_ = None
|
| 91 |
+
for chunk in self.chunks():
|
| 92 |
+
for line in chunk.splitlines(True):
|
| 93 |
+
if buffer_:
|
| 94 |
+
if endswith_cr(buffer_) and not equals_lf(line):
|
| 95 |
+
# Line split after a \r newline; yield buffer_.
|
| 96 |
+
yield buffer_
|
| 97 |
+
# Continue with line.
|
| 98 |
+
else:
|
| 99 |
+
# Line either split without a newline (line
|
| 100 |
+
# continues after buffer_) or with \r\n
|
| 101 |
+
# newline (line == b'\n').
|
| 102 |
+
line = buffer_ + line
|
| 103 |
+
# buffer_ handled, clear it.
|
| 104 |
+
buffer_ = None
|
| 105 |
+
|
| 106 |
+
# If this is the end of a \n or \r\n line, yield.
|
| 107 |
+
if endswith_lf(line):
|
| 108 |
+
yield line
|
| 109 |
+
else:
|
| 110 |
+
buffer_ = line
|
| 111 |
+
|
| 112 |
+
if buffer_ is not None:
|
| 113 |
+
yield buffer_
|
| 114 |
+
|
| 115 |
+
def __enter__(self):
|
| 116 |
+
return self
|
| 117 |
+
|
| 118 |
+
def __exit__(self, exc_type, exc_value, tb):
|
| 119 |
+
self.close()
|
| 120 |
+
|
| 121 |
+
def open(self, mode=None):
|
| 122 |
+
if not self.closed:
|
| 123 |
+
self.seek(0)
|
| 124 |
+
elif self.name and os.path.exists(self.name):
|
| 125 |
+
self.file = open(self.name, mode or self.mode)
|
| 126 |
+
else:
|
| 127 |
+
raise ValueError("The file cannot be reopened.")
|
| 128 |
+
return self
|
| 129 |
+
|
| 130 |
+
def close(self):
|
| 131 |
+
self.file.close()
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
class ContentFile(File):
|
| 135 |
+
"""
|
| 136 |
+
A File-like object that take just raw content, rather than an actual file.
|
| 137 |
+
"""
|
| 138 |
+
def __init__(self, content, name=None):
|
| 139 |
+
stream_class = StringIO if isinstance(content, str) else BytesIO
|
| 140 |
+
super().__init__(stream_class(content), name=name)
|
| 141 |
+
self.size = len(content)
|
| 142 |
+
|
| 143 |
+
def __str__(self):
|
| 144 |
+
return 'Raw content'
|
| 145 |
+
|
| 146 |
+
def __bool__(self):
|
| 147 |
+
return True
|
| 148 |
+
|
| 149 |
+
def open(self, mode=None):
|
| 150 |
+
self.seek(0)
|
| 151 |
+
return self
|
| 152 |
+
|
| 153 |
+
def close(self):
|
| 154 |
+
pass
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def endswith_cr(line):
|
| 158 |
+
"""Return True if line (a text or byte string) ends with '\r'."""
|
| 159 |
+
return line.endswith('\r' if isinstance(line, str) else b'\r')
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
def endswith_lf(line):
|
| 163 |
+
"""Return True if line (a text or byte string) ends with '\n'."""
|
| 164 |
+
return line.endswith('\n' if isinstance(line, str) else b'\n')
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def equals_lf(line):
|
| 168 |
+
"""Return True if line (a text or byte string) equals '\n'."""
|
| 169 |
+
return line == ('\n' if isinstance(line, str) else b'\n')
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/images.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Utility functions for handling images.
|
| 3 |
+
|
| 4 |
+
Requires Pillow as you might imagine.
|
| 5 |
+
"""
|
| 6 |
+
import struct
|
| 7 |
+
import zlib
|
| 8 |
+
|
| 9 |
+
from django.core.files import File
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class ImageFile(File):
|
| 13 |
+
"""
|
| 14 |
+
A mixin for use alongside django.core.files.base.File, which provides
|
| 15 |
+
additional features for dealing with images.
|
| 16 |
+
"""
|
| 17 |
+
@property
|
| 18 |
+
def width(self):
|
| 19 |
+
return self._get_image_dimensions()[0]
|
| 20 |
+
|
| 21 |
+
@property
|
| 22 |
+
def height(self):
|
| 23 |
+
return self._get_image_dimensions()[1]
|
| 24 |
+
|
| 25 |
+
def _get_image_dimensions(self):
|
| 26 |
+
if not hasattr(self, '_dimensions_cache'):
|
| 27 |
+
close = self.closed
|
| 28 |
+
self.open()
|
| 29 |
+
self._dimensions_cache = get_image_dimensions(self, close=close)
|
| 30 |
+
return self._dimensions_cache
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def get_image_dimensions(file_or_path, close=False):
|
| 34 |
+
"""
|
| 35 |
+
Return the (width, height) of an image, given an open file or a path. Set
|
| 36 |
+
'close' to True to close the file at the end if it is initially in an open
|
| 37 |
+
state.
|
| 38 |
+
"""
|
| 39 |
+
from PIL import ImageFile as PillowImageFile
|
| 40 |
+
|
| 41 |
+
p = PillowImageFile.Parser()
|
| 42 |
+
if hasattr(file_or_path, 'read'):
|
| 43 |
+
file = file_or_path
|
| 44 |
+
file_pos = file.tell()
|
| 45 |
+
file.seek(0)
|
| 46 |
+
else:
|
| 47 |
+
file = open(file_or_path, 'rb')
|
| 48 |
+
close = True
|
| 49 |
+
try:
|
| 50 |
+
# Most of the time Pillow only needs a small chunk to parse the image
|
| 51 |
+
# and get the dimensions, but with some TIFF files Pillow needs to
|
| 52 |
+
# parse the whole file.
|
| 53 |
+
chunk_size = 1024
|
| 54 |
+
while 1:
|
| 55 |
+
data = file.read(chunk_size)
|
| 56 |
+
if not data:
|
| 57 |
+
break
|
| 58 |
+
try:
|
| 59 |
+
p.feed(data)
|
| 60 |
+
except zlib.error as e:
|
| 61 |
+
# ignore zlib complaining on truncated stream, just feed more
|
| 62 |
+
# data to parser (ticket #19457).
|
| 63 |
+
if e.args[0].startswith("Error -5"):
|
| 64 |
+
pass
|
| 65 |
+
else:
|
| 66 |
+
raise
|
| 67 |
+
except struct.error:
|
| 68 |
+
# Ignore PIL failing on a too short buffer when reads return
|
| 69 |
+
# less bytes than expected. Skip and feed more data to the
|
| 70 |
+
# parser (ticket #24544).
|
| 71 |
+
pass
|
| 72 |
+
if p.image:
|
| 73 |
+
return p.image.size
|
| 74 |
+
chunk_size *= 2
|
| 75 |
+
return (None, None)
|
| 76 |
+
finally:
|
| 77 |
+
if close:
|
| 78 |
+
file.close()
|
| 79 |
+
else:
|
| 80 |
+
file.seek(file_pos)
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/locks.py
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Portable file locking utilities.
|
| 3 |
+
|
| 4 |
+
Based partially on an example by Jonathan Feignberg in the Python
|
| 5 |
+
Cookbook [1] (licensed under the Python Software License) and a ctypes port by
|
| 6 |
+
Anatoly Techtonik for Roundup [2] (license [3]).
|
| 7 |
+
|
| 8 |
+
[1] http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/65203
|
| 9 |
+
[2] http://sourceforge.net/p/roundup/code/ci/default/tree/roundup/backends/portalocker.py
|
| 10 |
+
[3] http://sourceforge.net/p/roundup/code/ci/default/tree/COPYING.txt
|
| 11 |
+
|
| 12 |
+
Example Usage::
|
| 13 |
+
|
| 14 |
+
>>> from django.core.files import locks
|
| 15 |
+
>>> with open('./file', 'wb') as f:
|
| 16 |
+
... locks.lock(f, locks.LOCK_EX)
|
| 17 |
+
... f.write('Django')
|
| 18 |
+
"""
|
| 19 |
+
import os
|
| 20 |
+
|
| 21 |
+
__all__ = ('LOCK_EX', 'LOCK_SH', 'LOCK_NB', 'lock', 'unlock')
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def _fd(f):
|
| 25 |
+
"""Get a filedescriptor from something which could be a file or an fd."""
|
| 26 |
+
return f.fileno() if hasattr(f, 'fileno') else f
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
if os.name == 'nt':
|
| 30 |
+
import msvcrt
|
| 31 |
+
from ctypes import (sizeof, c_ulong, c_void_p, c_int64,
|
| 32 |
+
Structure, Union, POINTER, windll, byref)
|
| 33 |
+
from ctypes.wintypes import BOOL, DWORD, HANDLE
|
| 34 |
+
|
| 35 |
+
LOCK_SH = 0 # the default
|
| 36 |
+
LOCK_NB = 0x1 # LOCKFILE_FAIL_IMMEDIATELY
|
| 37 |
+
LOCK_EX = 0x2 # LOCKFILE_EXCLUSIVE_LOCK
|
| 38 |
+
|
| 39 |
+
# --- Adapted from the pyserial project ---
|
| 40 |
+
# detect size of ULONG_PTR
|
| 41 |
+
if sizeof(c_ulong) != sizeof(c_void_p):
|
| 42 |
+
ULONG_PTR = c_int64
|
| 43 |
+
else:
|
| 44 |
+
ULONG_PTR = c_ulong
|
| 45 |
+
PVOID = c_void_p
|
| 46 |
+
|
| 47 |
+
# --- Union inside Structure by stackoverflow:3480240 ---
|
| 48 |
+
class _OFFSET(Structure):
|
| 49 |
+
_fields_ = [
|
| 50 |
+
('Offset', DWORD),
|
| 51 |
+
('OffsetHigh', DWORD)]
|
| 52 |
+
|
| 53 |
+
class _OFFSET_UNION(Union):
|
| 54 |
+
_anonymous_ = ['_offset']
|
| 55 |
+
_fields_ = [
|
| 56 |
+
('_offset', _OFFSET),
|
| 57 |
+
('Pointer', PVOID)]
|
| 58 |
+
|
| 59 |
+
class OVERLAPPED(Structure):
|
| 60 |
+
_anonymous_ = ['_offset_union']
|
| 61 |
+
_fields_ = [
|
| 62 |
+
('Internal', ULONG_PTR),
|
| 63 |
+
('InternalHigh', ULONG_PTR),
|
| 64 |
+
('_offset_union', _OFFSET_UNION),
|
| 65 |
+
('hEvent', HANDLE)]
|
| 66 |
+
|
| 67 |
+
LPOVERLAPPED = POINTER(OVERLAPPED)
|
| 68 |
+
|
| 69 |
+
# --- Define function prototypes for extra safety ---
|
| 70 |
+
LockFileEx = windll.kernel32.LockFileEx
|
| 71 |
+
LockFileEx.restype = BOOL
|
| 72 |
+
LockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, DWORD, LPOVERLAPPED]
|
| 73 |
+
UnlockFileEx = windll.kernel32.UnlockFileEx
|
| 74 |
+
UnlockFileEx.restype = BOOL
|
| 75 |
+
UnlockFileEx.argtypes = [HANDLE, DWORD, DWORD, DWORD, LPOVERLAPPED]
|
| 76 |
+
|
| 77 |
+
def lock(f, flags):
|
| 78 |
+
hfile = msvcrt.get_osfhandle(_fd(f))
|
| 79 |
+
overlapped = OVERLAPPED()
|
| 80 |
+
ret = LockFileEx(hfile, flags, 0, 0, 0xFFFF0000, byref(overlapped))
|
| 81 |
+
return bool(ret)
|
| 82 |
+
|
| 83 |
+
def unlock(f):
|
| 84 |
+
hfile = msvcrt.get_osfhandle(_fd(f))
|
| 85 |
+
overlapped = OVERLAPPED()
|
| 86 |
+
ret = UnlockFileEx(hfile, 0, 0, 0xFFFF0000, byref(overlapped))
|
| 87 |
+
return bool(ret)
|
| 88 |
+
else:
|
| 89 |
+
try:
|
| 90 |
+
import fcntl
|
| 91 |
+
LOCK_SH = fcntl.LOCK_SH # shared lock
|
| 92 |
+
LOCK_NB = fcntl.LOCK_NB # non-blocking
|
| 93 |
+
LOCK_EX = fcntl.LOCK_EX
|
| 94 |
+
except (ImportError, AttributeError):
|
| 95 |
+
# File locking is not supported.
|
| 96 |
+
LOCK_EX = LOCK_SH = LOCK_NB = 0
|
| 97 |
+
|
| 98 |
+
# Dummy functions that don't do anything.
|
| 99 |
+
def lock(f, flags):
|
| 100 |
+
# File is not locked
|
| 101 |
+
return False
|
| 102 |
+
|
| 103 |
+
def unlock(f):
|
| 104 |
+
# File is unlocked
|
| 105 |
+
return True
|
| 106 |
+
else:
|
| 107 |
+
def lock(f, flags):
|
| 108 |
+
ret = fcntl.flock(_fd(f), flags)
|
| 109 |
+
return ret == 0
|
| 110 |
+
|
| 111 |
+
def unlock(f):
|
| 112 |
+
ret = fcntl.flock(_fd(f), fcntl.LOCK_UN)
|
| 113 |
+
return ret == 0
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/move.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Move a file in the safest way possible::
|
| 3 |
+
|
| 4 |
+
>>> from django.core.files.move import file_move_safe
|
| 5 |
+
>>> file_move_safe("/tmp/old_file", "/tmp/new_file")
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import errno
|
| 9 |
+
import os
|
| 10 |
+
from shutil import copystat
|
| 11 |
+
|
| 12 |
+
from django.core.files import locks
|
| 13 |
+
|
| 14 |
+
__all__ = ['file_move_safe']
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def _samefile(src, dst):
|
| 18 |
+
# Macintosh, Unix.
|
| 19 |
+
if hasattr(os.path, 'samefile'):
|
| 20 |
+
try:
|
| 21 |
+
return os.path.samefile(src, dst)
|
| 22 |
+
except OSError:
|
| 23 |
+
return False
|
| 24 |
+
|
| 25 |
+
# All other platforms: check for same pathname.
|
| 26 |
+
return (os.path.normcase(os.path.abspath(src)) ==
|
| 27 |
+
os.path.normcase(os.path.abspath(dst)))
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def file_move_safe(old_file_name, new_file_name, chunk_size=1024 * 64, allow_overwrite=False):
|
| 31 |
+
"""
|
| 32 |
+
Move a file from one location to another in the safest way possible.
|
| 33 |
+
|
| 34 |
+
First, try ``os.rename``, which is simple but will break across filesystems.
|
| 35 |
+
If that fails, stream manually from one file to another in pure Python.
|
| 36 |
+
|
| 37 |
+
If the destination file exists and ``allow_overwrite`` is ``False``, raise
|
| 38 |
+
``IOError``.
|
| 39 |
+
"""
|
| 40 |
+
# There's no reason to move if we don't have to.
|
| 41 |
+
if _samefile(old_file_name, new_file_name):
|
| 42 |
+
return
|
| 43 |
+
|
| 44 |
+
try:
|
| 45 |
+
if not allow_overwrite and os.access(new_file_name, os.F_OK):
|
| 46 |
+
raise IOError("Destination file %s exists and allow_overwrite is False" % new_file_name)
|
| 47 |
+
|
| 48 |
+
os.rename(old_file_name, new_file_name)
|
| 49 |
+
return
|
| 50 |
+
except OSError:
|
| 51 |
+
# OSError happens with os.rename() if moving to another filesystem or
|
| 52 |
+
# when moving opened files on certain operating systems.
|
| 53 |
+
pass
|
| 54 |
+
|
| 55 |
+
# first open the old file, so that it won't go away
|
| 56 |
+
with open(old_file_name, 'rb') as old_file:
|
| 57 |
+
# now open the new file, not forgetting allow_overwrite
|
| 58 |
+
fd = os.open(new_file_name, (os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) |
|
| 59 |
+
(os.O_EXCL if not allow_overwrite else 0)))
|
| 60 |
+
try:
|
| 61 |
+
locks.lock(fd, locks.LOCK_EX)
|
| 62 |
+
current_chunk = None
|
| 63 |
+
while current_chunk != b'':
|
| 64 |
+
current_chunk = old_file.read(chunk_size)
|
| 65 |
+
os.write(fd, current_chunk)
|
| 66 |
+
finally:
|
| 67 |
+
locks.unlock(fd)
|
| 68 |
+
os.close(fd)
|
| 69 |
+
|
| 70 |
+
try:
|
| 71 |
+
copystat(old_file_name, new_file_name)
|
| 72 |
+
except PermissionError as e:
|
| 73 |
+
# Certain filesystems (e.g. CIFS) fail to copy the file's metadata if
|
| 74 |
+
# the type of the destination filesystem isn't the same as the source
|
| 75 |
+
# filesystem; ignore that.
|
| 76 |
+
if e.errno != errno.EPERM:
|
| 77 |
+
raise
|
| 78 |
+
|
| 79 |
+
try:
|
| 80 |
+
os.remove(old_file_name)
|
| 81 |
+
except PermissionError as e:
|
| 82 |
+
# Certain operating systems (Cygwin and Windows)
|
| 83 |
+
# fail when deleting opened files, ignore it. (For the
|
| 84 |
+
# systems where this happens, temporary files will be auto-deleted
|
| 85 |
+
# on close anyway.)
|
| 86 |
+
if getattr(e, 'winerror', 0) != 32:
|
| 87 |
+
raise
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/storage.py
ADDED
|
@@ -0,0 +1,364 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from datetime import datetime
|
| 3 |
+
from urllib.parse import urljoin
|
| 4 |
+
|
| 5 |
+
from django.conf import settings
|
| 6 |
+
from django.core.exceptions import SuspiciousFileOperation
|
| 7 |
+
from django.core.files import File, locks
|
| 8 |
+
from django.core.files.move import file_move_safe
|
| 9 |
+
from django.core.signals import setting_changed
|
| 10 |
+
from django.utils import timezone
|
| 11 |
+
from django.utils._os import safe_join
|
| 12 |
+
from django.utils.crypto import get_random_string
|
| 13 |
+
from django.utils.deconstruct import deconstructible
|
| 14 |
+
from django.utils.encoding import filepath_to_uri
|
| 15 |
+
from django.utils.functional import LazyObject, cached_property
|
| 16 |
+
from django.utils.module_loading import import_string
|
| 17 |
+
from django.utils.text import get_valid_filename
|
| 18 |
+
|
| 19 |
+
__all__ = ('Storage', 'FileSystemStorage', 'DefaultStorage', 'default_storage')
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class Storage:
|
| 23 |
+
"""
|
| 24 |
+
A base storage class, providing some default behaviors that all other
|
| 25 |
+
storage systems can inherit or override, as necessary.
|
| 26 |
+
"""
|
| 27 |
+
|
| 28 |
+
# The following methods represent a public interface to private methods.
|
| 29 |
+
# These shouldn't be overridden by subclasses unless absolutely necessary.
|
| 30 |
+
|
| 31 |
+
def open(self, name, mode='rb'):
|
| 32 |
+
"""Retrieve the specified file from storage."""
|
| 33 |
+
return self._open(name, mode)
|
| 34 |
+
|
| 35 |
+
def save(self, name, content, max_length=None):
|
| 36 |
+
"""
|
| 37 |
+
Save new content to the file specified by name. The content should be
|
| 38 |
+
a proper File object or any python file-like object, ready to be read
|
| 39 |
+
from the beginning.
|
| 40 |
+
"""
|
| 41 |
+
# Get the proper name for the file, as it will actually be saved.
|
| 42 |
+
if name is None:
|
| 43 |
+
name = content.name
|
| 44 |
+
|
| 45 |
+
if not hasattr(content, 'chunks'):
|
| 46 |
+
content = File(content, name)
|
| 47 |
+
|
| 48 |
+
name = self.get_available_name(name, max_length=max_length)
|
| 49 |
+
return self._save(name, content)
|
| 50 |
+
|
| 51 |
+
# These methods are part of the public API, with default implementations.
|
| 52 |
+
|
| 53 |
+
def get_valid_name(self, name):
|
| 54 |
+
"""
|
| 55 |
+
Return a filename, based on the provided filename, that's suitable for
|
| 56 |
+
use in the target storage system.
|
| 57 |
+
"""
|
| 58 |
+
return get_valid_filename(name)
|
| 59 |
+
|
| 60 |
+
def get_available_name(self, name, max_length=None):
|
| 61 |
+
"""
|
| 62 |
+
Return a filename that's free on the target storage system and
|
| 63 |
+
available for new content to be written to.
|
| 64 |
+
"""
|
| 65 |
+
dir_name, file_name = os.path.split(name)
|
| 66 |
+
file_root, file_ext = os.path.splitext(file_name)
|
| 67 |
+
# If the filename already exists, add an underscore and a random 7
|
| 68 |
+
# character alphanumeric string (before the file extension, if one
|
| 69 |
+
# exists) to the filename until the generated filename doesn't exist.
|
| 70 |
+
# Truncate original name if required, so the new filename does not
|
| 71 |
+
# exceed the max_length.
|
| 72 |
+
while self.exists(name) or (max_length and len(name) > max_length):
|
| 73 |
+
# file_ext includes the dot.
|
| 74 |
+
name = os.path.join(dir_name, "%s_%s%s" % (file_root, get_random_string(7), file_ext))
|
| 75 |
+
if max_length is None:
|
| 76 |
+
continue
|
| 77 |
+
# Truncate file_root if max_length exceeded.
|
| 78 |
+
truncation = len(name) - max_length
|
| 79 |
+
if truncation > 0:
|
| 80 |
+
file_root = file_root[:-truncation]
|
| 81 |
+
# Entire file_root was truncated in attempt to find an available filename.
|
| 82 |
+
if not file_root:
|
| 83 |
+
raise SuspiciousFileOperation(
|
| 84 |
+
'Storage can not find an available filename for "%s". '
|
| 85 |
+
'Please make sure that the corresponding file field '
|
| 86 |
+
'allows sufficient "max_length".' % name
|
| 87 |
+
)
|
| 88 |
+
name = os.path.join(dir_name, "%s_%s%s" % (file_root, get_random_string(7), file_ext))
|
| 89 |
+
return name
|
| 90 |
+
|
| 91 |
+
def generate_filename(self, filename):
|
| 92 |
+
"""
|
| 93 |
+
Validate the filename by calling get_valid_name() and return a filename
|
| 94 |
+
to be passed to the save() method.
|
| 95 |
+
"""
|
| 96 |
+
# `filename` may include a path as returned by FileField.upload_to.
|
| 97 |
+
dirname, filename = os.path.split(filename)
|
| 98 |
+
return os.path.normpath(os.path.join(dirname, self.get_valid_name(filename)))
|
| 99 |
+
|
| 100 |
+
def path(self, name):
|
| 101 |
+
"""
|
| 102 |
+
Return a local filesystem path where the file can be retrieved using
|
| 103 |
+
Python's built-in open() function. Storage systems that can't be
|
| 104 |
+
accessed using open() should *not* implement this method.
|
| 105 |
+
"""
|
| 106 |
+
raise NotImplementedError("This backend doesn't support absolute paths.")
|
| 107 |
+
|
| 108 |
+
# The following methods form the public API for storage systems, but with
|
| 109 |
+
# no default implementations. Subclasses must implement *all* of these.
|
| 110 |
+
|
| 111 |
+
def delete(self, name):
|
| 112 |
+
"""
|
| 113 |
+
Delete the specified file from the storage system.
|
| 114 |
+
"""
|
| 115 |
+
raise NotImplementedError('subclasses of Storage must provide a delete() method')
|
| 116 |
+
|
| 117 |
+
def exists(self, name):
|
| 118 |
+
"""
|
| 119 |
+
Return True if a file referenced by the given name already exists in the
|
| 120 |
+
storage system, or False if the name is available for a new file.
|
| 121 |
+
"""
|
| 122 |
+
raise NotImplementedError('subclasses of Storage must provide an exists() method')
|
| 123 |
+
|
| 124 |
+
def listdir(self, path):
|
| 125 |
+
"""
|
| 126 |
+
List the contents of the specified path. Return a 2-tuple of lists:
|
| 127 |
+
the first item being directories, the second item being files.
|
| 128 |
+
"""
|
| 129 |
+
raise NotImplementedError('subclasses of Storage must provide a listdir() method')
|
| 130 |
+
|
| 131 |
+
def size(self, name):
|
| 132 |
+
"""
|
| 133 |
+
Return the total size, in bytes, of the file specified by name.
|
| 134 |
+
"""
|
| 135 |
+
raise NotImplementedError('subclasses of Storage must provide a size() method')
|
| 136 |
+
|
| 137 |
+
def url(self, name):
|
| 138 |
+
"""
|
| 139 |
+
Return an absolute URL where the file's contents can be accessed
|
| 140 |
+
directly by a Web browser.
|
| 141 |
+
"""
|
| 142 |
+
raise NotImplementedError('subclasses of Storage must provide a url() method')
|
| 143 |
+
|
| 144 |
+
def get_accessed_time(self, name):
|
| 145 |
+
"""
|
| 146 |
+
Return the last accessed time (as a datetime) of the file specified by
|
| 147 |
+
name. The datetime will be timezone-aware if USE_TZ=True.
|
| 148 |
+
"""
|
| 149 |
+
raise NotImplementedError('subclasses of Storage must provide a get_accessed_time() method')
|
| 150 |
+
|
| 151 |
+
def get_created_time(self, name):
|
| 152 |
+
"""
|
| 153 |
+
Return the creation time (as a datetime) of the file specified by name.
|
| 154 |
+
The datetime will be timezone-aware if USE_TZ=True.
|
| 155 |
+
"""
|
| 156 |
+
raise NotImplementedError('subclasses of Storage must provide a get_created_time() method')
|
| 157 |
+
|
| 158 |
+
def get_modified_time(self, name):
|
| 159 |
+
"""
|
| 160 |
+
Return the last modified time (as a datetime) of the file specified by
|
| 161 |
+
name. The datetime will be timezone-aware if USE_TZ=True.
|
| 162 |
+
"""
|
| 163 |
+
raise NotImplementedError('subclasses of Storage must provide a get_modified_time() method')
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
@deconstructible
|
| 167 |
+
class FileSystemStorage(Storage):
|
| 168 |
+
"""
|
| 169 |
+
Standard filesystem storage
|
| 170 |
+
"""
|
| 171 |
+
|
| 172 |
+
def __init__(self, location=None, base_url=None, file_permissions_mode=None,
|
| 173 |
+
directory_permissions_mode=None):
|
| 174 |
+
self._location = location
|
| 175 |
+
self._base_url = base_url
|
| 176 |
+
self._file_permissions_mode = file_permissions_mode
|
| 177 |
+
self._directory_permissions_mode = directory_permissions_mode
|
| 178 |
+
setting_changed.connect(self._clear_cached_properties)
|
| 179 |
+
|
| 180 |
+
def _clear_cached_properties(self, setting, **kwargs):
|
| 181 |
+
"""Reset setting based property values."""
|
| 182 |
+
if setting == 'MEDIA_ROOT':
|
| 183 |
+
self.__dict__.pop('base_location', None)
|
| 184 |
+
self.__dict__.pop('location', None)
|
| 185 |
+
elif setting == 'MEDIA_URL':
|
| 186 |
+
self.__dict__.pop('base_url', None)
|
| 187 |
+
elif setting == 'FILE_UPLOAD_PERMISSIONS':
|
| 188 |
+
self.__dict__.pop('file_permissions_mode', None)
|
| 189 |
+
elif setting == 'FILE_UPLOAD_DIRECTORY_PERMISSIONS':
|
| 190 |
+
self.__dict__.pop('directory_permissions_mode', None)
|
| 191 |
+
|
| 192 |
+
def _value_or_setting(self, value, setting):
|
| 193 |
+
return setting if value is None else value
|
| 194 |
+
|
| 195 |
+
@cached_property
|
| 196 |
+
def base_location(self):
|
| 197 |
+
return self._value_or_setting(self._location, settings.MEDIA_ROOT)
|
| 198 |
+
|
| 199 |
+
@cached_property
|
| 200 |
+
def location(self):
|
| 201 |
+
return os.path.abspath(self.base_location)
|
| 202 |
+
|
| 203 |
+
@cached_property
|
| 204 |
+
def base_url(self):
|
| 205 |
+
if self._base_url is not None and not self._base_url.endswith('/'):
|
| 206 |
+
self._base_url += '/'
|
| 207 |
+
return self._value_or_setting(self._base_url, settings.MEDIA_URL)
|
| 208 |
+
|
| 209 |
+
@cached_property
|
| 210 |
+
def file_permissions_mode(self):
|
| 211 |
+
return self._value_or_setting(self._file_permissions_mode, settings.FILE_UPLOAD_PERMISSIONS)
|
| 212 |
+
|
| 213 |
+
@cached_property
|
| 214 |
+
def directory_permissions_mode(self):
|
| 215 |
+
return self._value_or_setting(self._directory_permissions_mode, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS)
|
| 216 |
+
|
| 217 |
+
def _open(self, name, mode='rb'):
|
| 218 |
+
return File(open(self.path(name), mode))
|
| 219 |
+
|
| 220 |
+
def _save(self, name, content):
    """
    Write ``content`` to disk under ``name`` and return the name actually
    used (with forward slashes, even on Windows).

    Tolerates concurrent savers: directory-creation races are absorbed, and
    a filename collision triggers a retry via get_available_name().
    """
    full_path = self.path(name)

    # Create any intermediate directories that do not exist.
    directory = os.path.dirname(full_path)
    if not os.path.exists(directory):
        try:
            if self.directory_permissions_mode is not None:
                # os.makedirs applies the global umask, so we reset it,
                # for consistency with file_permissions_mode behavior.
                old_umask = os.umask(0)
                try:
                    os.makedirs(directory, self.directory_permissions_mode)
                finally:
                    os.umask(old_umask)
            else:
                os.makedirs(directory)
        except FileNotFoundError:
            # There's a race between os.path.exists() and os.makedirs().
            # If os.makedirs() fails with FileNotFoundError, the directory
            # was created concurrently.
            pass
    if not os.path.isdir(directory):
        raise IOError("%s exists and is not a directory." % directory)

    # There's a potential race condition between get_available_name and
    # saving the file; it's possible that two threads might return the
    # same name, at which point all sorts of fun happens. So we need to
    # try to create the file, but if it already exists we have to go back
    # to get_available_name() and try again.

    while True:
        try:
            # This file has a file path that we can move.
            if hasattr(content, 'temporary_file_path'):
                file_move_safe(content.temporary_file_path(), full_path)

            # This is a normal uploadedfile that we can stream.
            else:
                # This fun binary flag incantation makes os.open throw an
                # OSError if the file already exists before we open it.
                flags = (os.O_WRONLY | os.O_CREAT | os.O_EXCL |
                         getattr(os, 'O_BINARY', 0))
                # The current umask value is masked out by os.open!
                fd = os.open(full_path, flags, 0o666)
                _file = None
                try:
                    locks.lock(fd, locks.LOCK_EX)
                    for chunk in content.chunks():
                        # Open the fd lazily so the mode (binary vs. text)
                        # can be chosen from the type of the first chunk.
                        if _file is None:
                            mode = 'wb' if isinstance(chunk, bytes) else 'wt'
                            _file = os.fdopen(fd, mode)
                        _file.write(chunk)
                finally:
                    locks.unlock(fd)
                    # Close the wrapper if one was created (it owns the fd);
                    # otherwise close the raw fd directly.
                    if _file is not None:
                        _file.close()
                    else:
                        os.close(fd)
        except FileExistsError:
            # A new name is needed if the file exists.
            name = self.get_available_name(name)
            full_path = self.path(name)
        else:
            # OK, the file save worked. Break out of the loop.
            break

    if self.file_permissions_mode is not None:
        os.chmod(full_path, self.file_permissions_mode)

    # Store filenames with forward slashes, even on Windows.
    return name.replace('\\', '/')
|
| 292 |
+
|
| 293 |
+
def delete(self, name):
    """Remove the file or (empty) directory named ``name`` if it exists."""
    assert name, "The name argument is not allowed to be empty."
    target = self.path(name)
    try:
        # Pick the right removal primitive for the entry type.
        remover = os.rmdir if os.path.isdir(target) else os.remove
        remover(target)
    except FileNotFoundError:
        # The entry was removed concurrently by someone else -- nothing to do.
        pass
|
| 306 |
+
|
| 307 |
+
def exists(self, name):
    """Return True if a file or directory named ``name`` already exists."""
    return os.path.exists(self.path(name))
|
| 309 |
+
|
| 310 |
+
def listdir(self, path):
    """List ``path``, returning a (directories, files) pair of entry names."""
    target = self.path(path)
    dirs, files = [], []
    for entry in os.listdir(target):
        # Route each entry into the matching bucket.
        bucket = dirs if os.path.isdir(os.path.join(target, entry)) else files
        bucket.append(entry)
    return dirs, files
|
| 319 |
+
|
| 320 |
+
def path(self, name):
    """Return the absolute path of ``name`` inside the storage root.
    safe_join refuses names that would escape the root directory."""
    return safe_join(self.location, name)
|
| 322 |
+
|
| 323 |
+
def size(self, name):
    """Return the size of the named file, in bytes."""
    return os.path.getsize(self.path(name))
|
| 325 |
+
|
| 326 |
+
def url(self, name):
    """Return the public URL where ``name`` can be accessed, or raise
    ValueError when no base URL is configured."""
    base = self.base_url
    if base is None:
        raise ValueError("This file is not accessible via a URL.")
    relative = filepath_to_uri(name)
    if relative is not None:
        # Strip leading slashes so urljoin treats the path as relative.
        relative = relative.lstrip('/')
    return urljoin(base, relative)
|
| 333 |
+
|
| 334 |
+
def _datetime_from_timestamp(self, ts):
    """
    If timezone support is enabled, make an aware datetime object in UTC;
    otherwise make a naive one in the local timezone.
    """
    if not settings.USE_TZ:
        return datetime.fromtimestamp(ts)
    # Safe to use .replace() because UTC doesn't have DST.
    return datetime.utcfromtimestamp(ts).replace(tzinfo=timezone.utc)
|
| 344 |
+
|
| 345 |
+
def get_accessed_time(self, name):
    """Return the last-accessed time of the file as a datetime."""
    return self._datetime_from_timestamp(os.path.getatime(self.path(name)))
|
| 347 |
+
|
| 348 |
+
def get_created_time(self, name):
    """Return the creation time of the file as a datetime.
    Note: per the os.path docs, getctime() is creation time on Windows but
    metadata-change time on most Unix systems."""
    return self._datetime_from_timestamp(os.path.getctime(self.path(name)))
|
| 350 |
+
|
| 351 |
+
def get_modified_time(self, name):
    """Return the last-modified time of the file as a datetime."""
    return self._datetime_from_timestamp(os.path.getmtime(self.path(name)))
|
| 353 |
+
|
| 354 |
+
|
| 355 |
+
def get_storage_class(import_path=None):
    """Import and return the storage class at ``import_path``; defaults to
    settings.DEFAULT_FILE_STORAGE when no path is given."""
    return import_string(import_path or settings.DEFAULT_FILE_STORAGE)
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
class DefaultStorage(LazyObject):
    """Lazy wrapper around the configured default storage backend, so the
    DEFAULT_FILE_STORAGE setting is only consulted on first access."""
    def _setup(self):
        # Resolve the configured storage class and instantiate it.
        self._wrapped = get_storage_class()()
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
# Module-level singleton used as the default storage throughout Django.
default_storage = DefaultStorage()
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/temp.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
The temp module provides a NamedTemporaryFile that can be reopened in the same
|
| 3 |
+
process on any platform. Most platforms use the standard Python
|
| 4 |
+
tempfile.NamedTemporaryFile class, but Windows users are given a custom class.
|
| 5 |
+
|
| 6 |
+
This is needed because the Python implementation of NamedTemporaryFile uses the
|
| 7 |
+
O_TEMPORARY flag under Windows, which prevents the file from being reopened
|
| 8 |
+
if the same flag is not provided [1][2]. Note that this does not address the
|
| 9 |
+
more general issue of opening a file for writing and reading in multiple
|
| 10 |
+
processes in a manner that works across platforms.
|
| 11 |
+
|
| 12 |
+
The custom version of NamedTemporaryFile doesn't support the same keyword
|
| 13 |
+
arguments available in tempfile.NamedTemporaryFile.
|
| 14 |
+
|
| 15 |
+
1: https://mail.python.org/pipermail/python-list/2005-December/336957.html
|
| 16 |
+
2: http://bugs.python.org/issue14243
|
| 17 |
+
"""
|
| 18 |
+
|
| 19 |
+
import os
|
| 20 |
+
import tempfile
|
| 21 |
+
|
| 22 |
+
from django.core.files.utils import FileProxyMixin
|
| 23 |
+
|
| 24 |
+
__all__ = ('NamedTemporaryFile', 'gettempdir',)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
# On Windows, provide a custom NamedTemporaryFile that can be reopened while
# still open; the stdlib version uses O_TEMPORARY, which forbids reopening.
if os.name == 'nt':
    class TemporaryFile(FileProxyMixin):
        """
        Temporary file object constructor that supports reopening of the
        temporary file in Windows.

        Unlike tempfile.NamedTemporaryFile from the standard library,
        __init__() doesn't support the 'delete', 'buffering', 'encoding', or
        'newline' keyword arguments.
        """
        def __init__(self, mode='w+b', bufsize=-1, suffix='', prefix='', dir=None):
            # mkstemp() creates the file securely and hands back an OS fd.
            fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir)
            self.name = name
            self.file = os.fdopen(fd, mode, bufsize)
            self.close_called = False

        # Because close can be called during shutdown
        # we need to cache os.unlink and access it
        # as self.unlink only
        unlink = os.unlink

        def close(self):
            # Idempotent: only the first call closes and unlinks the file.
            if not self.close_called:
                self.close_called = True
                try:
                    self.file.close()
                except (OSError, IOError):
                    pass
                try:
                    self.unlink(self.name)
                except OSError:
                    pass

        def __del__(self):
            self.close()

        def __enter__(self):
            self.file.__enter__()
            return self

        def __exit__(self, exc, value, tb):
            self.file.__exit__(exc, value, tb)

    NamedTemporaryFile = TemporaryFile
else:
    # POSIX platforms can use the stdlib implementation directly.
    NamedTemporaryFile = tempfile.NamedTemporaryFile

gettempdir = tempfile.gettempdir
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/uploadedfile.py
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Classes representing uploaded files.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
from io import BytesIO
|
| 7 |
+
|
| 8 |
+
from django.conf import settings
|
| 9 |
+
from django.core.files import temp as tempfile
|
| 10 |
+
from django.core.files.base import File
|
| 11 |
+
|
| 12 |
+
__all__ = ('UploadedFile', 'TemporaryUploadedFile', 'InMemoryUploadedFile',
|
| 13 |
+
'SimpleUploadedFile')
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class UploadedFile(File):
    """
    An abstract uploaded file (``TemporaryUploadedFile`` and
    ``InMemoryUploadedFile`` are the built-in concrete subclasses).

    An ``UploadedFile`` object behaves somewhat like a file object and
    represents some file data that the user submitted with a form.
    """
    DEFAULT_CHUNK_SIZE = 64 * 2 ** 10

    def __init__(self, file=None, name=None, content_type=None, size=None, charset=None, content_type_extra=None):
        super().__init__(file, name)
        self.size = size
        self.content_type = content_type
        self.charset = charset
        self.content_type_extra = content_type_extra

    def __repr__(self):
        return "<%s: %s (%s)>" % (self.__class__.__name__, self.name, self.content_type)

    def _get_name(self):
        return self._name

    def _set_name(self, name):
        # Sanitize the client-supplied name: keep only the basename (anything
        # with a directory component is dangerous) and cap the total length,
        # since names longer than 255 characters can break on older OSes.
        if name is not None:
            name = os.path.basename(name)
            if len(name) > 255:
                stem, ext = os.path.splitext(name)
                ext = ext[:255]
                name = stem[:255 - len(ext)] + ext
        self._name = name

    name = property(_get_name, _set_name)
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class TemporaryUploadedFile(UploadedFile):
    """
    A file uploaded to a temporary location (i.e. stream-to-disk).
    """
    def __init__(self, name, content_type, size, charset, content_type_extra=None):
        # Preserve the original extension on the temp file so tools that key
        # off the suffix keep working; honor FILE_UPLOAD_TEMP_DIR if set.
        _, ext = os.path.splitext(name)
        file = tempfile.NamedTemporaryFile(suffix='.upload' + ext, dir=settings.FILE_UPLOAD_TEMP_DIR)
        super().__init__(file, name, content_type, size, charset, content_type_extra)

    def temporary_file_path(self):
        """Return the full path of this file."""
        return self.file.name

    def close(self):
        try:
            return self.file.close()
        except FileNotFoundError:
            # The file was moved or deleted before the tempfile could unlink
            # it. Still sets self.file.close_called and calls
            # self.file.file.close() before the exception.
            pass
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class InMemoryUploadedFile(UploadedFile):
    """
    A file uploaded into memory (i.e. stream-to-memory).
    """
    def __init__(self, file, field_name, name, content_type, size, charset, content_type_extra=None):
        super().__init__(file, name, content_type, size, charset, content_type_extra)
        self.field_name = field_name

    def open(self, mode=None):
        # Rewind so callers always read from the start of the buffer.
        self.file.seek(0)
        return self

    def chunks(self, chunk_size=None):
        # The whole payload is already in memory, so yield it in one piece.
        self.file.seek(0)
        yield self.read()

    def multiple_chunks(self, chunk_size=None):
        # Since it's in memory, we'll never have multiple chunks.
        return False
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
class SimpleUploadedFile(InMemoryUploadedFile):
    """
    A simple representation of a file, which just has content, size, and a name.
    """
    def __init__(self, name, content, content_type='text/plain'):
        # Treat a falsy content value (None, b'') as an empty payload.
        data = content if content else b''
        super().__init__(BytesIO(data), None, name, content_type, len(data), None, None)

    @classmethod
    def from_dict(cls, file_dict):
        """
        Create a SimpleUploadedFile object from a dictionary with keys:
           - filename
           - content-type
           - content
        """
        return cls(
            file_dict['filename'],
            file_dict['content'],
            file_dict.get('content-type', 'text/plain'),
        )
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/uploadhandler.py
ADDED
|
@@ -0,0 +1,208 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Base file upload handler classes, and the built-in concrete subclasses
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from io import BytesIO
|
| 6 |
+
|
| 7 |
+
from django.conf import settings
|
| 8 |
+
from django.core.files.uploadedfile import (
|
| 9 |
+
InMemoryUploadedFile, TemporaryUploadedFile,
|
| 10 |
+
)
|
| 11 |
+
from django.utils.module_loading import import_string
|
| 12 |
+
|
| 13 |
+
__all__ = [
|
| 14 |
+
'UploadFileException', 'StopUpload', 'SkipFile', 'FileUploadHandler',
|
| 15 |
+
'TemporaryFileUploadHandler', 'MemoryFileUploadHandler', 'load_handler',
|
| 16 |
+
'StopFutureHandlers'
|
| 17 |
+
]
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class UploadFileException(Exception):
    """
    Base class for any error having to do with uploading files.
    """
    pass
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class StopUpload(UploadFileException):
    """
    This exception is raised when an upload must abort.
    """
    def __init__(self, connection_reset=False):
        """
        If ``connection_reset`` is ``True``, Django will halt the upload
        without consuming the rest of the upload. This will cause the browser
        to show a "connection reset" error.
        """
        self.connection_reset = connection_reset

    def __str__(self):
        if self.connection_reset:
            return 'StopUpload: Halt current upload.'
        else:
            return 'StopUpload: Consume request data, then halt.'
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
class SkipFile(UploadFileException):
    """
    This exception is raised by an upload handler that wants to skip a given file.
    """
    pass
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class StopFutureHandlers(UploadFileException):
    """
    Upload handlers that have handled a file and do not want future handlers
    to run should raise this exception instead of returning None.
    """
    pass
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class FileUploadHandler:
    """
    Base class for streaming upload handlers.
    """
    chunk_size = 64 * 2 ** 10  # : The default chunk size is 64 KB.

    def __init__(self, request=None):
        # Per-file metadata; populated by new_file() for each uploaded file.
        self.file_name = None
        self.content_type = None
        self.content_length = None
        self.charset = None
        self.content_type_extra = None
        self.request = request

    def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
        """
        Handle the raw input from the client.

        Parameters:

        :input_data:
            An object that supports reading via .read().
        :META:
            ``request.META``.
        :content_length:
            The (integer) value of the Content-Length header from the
            client.
        :boundary: The boundary from the Content-Type header. Be sure to
            prepend two '--'.
        """
        pass

    def new_file(self, field_name, file_name, content_type, content_length, charset=None, content_type_extra=None):
        """
        Signal that a new file has been started.

        Warning: As with any data from the client, you should not trust
        content_length (and sometimes won't even get it).
        """
        self.field_name = field_name
        self.file_name = file_name
        self.content_type = content_type
        self.content_length = content_length
        self.charset = charset
        self.content_type_extra = content_type_extra

    def receive_data_chunk(self, raw_data, start):
        """
        Receive data from the streamed upload parser. ``start`` is the position
        in the file of the chunk.
        """
        raise NotImplementedError('subclasses of FileUploadHandler must provide a receive_data_chunk() method')

    def file_complete(self, file_size):
        """
        Signal that a file has completed. File size corresponds to the actual
        size accumulated by all the chunks.

        Subclasses should return a valid ``UploadedFile`` object.
        """
        raise NotImplementedError('subclasses of FileUploadHandler must provide a file_complete() method')

    def upload_complete(self):
        """
        Signal that the upload is complete. Subclasses should perform cleanup
        that is necessary for this handler.
        """
        pass
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
class TemporaryFileUploadHandler(FileUploadHandler):
    """
    Upload handler that streams data into a temporary file.
    """
    def new_file(self, *args, **kwargs):
        """
        Create the file object to append to as data is coming in.
        """
        super().new_file(*args, **kwargs)
        # Size starts at 0 and is fixed up in file_complete().
        self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset, self.content_type_extra)

    def receive_data_chunk(self, raw_data, start):
        # Append each incoming chunk to the on-disk temp file; returning
        # None stops the chunk from propagating to later handlers.
        self.file.write(raw_data)

    def file_complete(self, file_size):
        # Rewind and record the final size before handing the file back.
        self.file.seek(0)
        self.file.size = file_size
        return self.file
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
class MemoryFileUploadHandler(FileUploadHandler):
    """
    File upload handler to stream uploads into memory (used for small files).
    """

    def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
        """
        Use the content_length to signal whether or not this handler should be
        used.
        """
        # Only uploads that fit under the memory cap are handled here.
        self.activated = content_length <= settings.FILE_UPLOAD_MAX_MEMORY_SIZE

    def new_file(self, *args, **kwargs):
        super().new_file(*args, **kwargs)
        if self.activated:
            self.file = BytesIO()
            # We own this file now; stop later handlers from seeing it.
            raise StopFutureHandlers()

    def receive_data_chunk(self, raw_data, start):
        """Buffer the chunk in memory, or pass it through when inactive."""
        if not self.activated:
            return raw_data
        self.file.write(raw_data)

    def file_complete(self, file_size):
        """Return a file object if this handler is activated."""
        if not self.activated:
            return

        self.file.seek(0)
        return InMemoryUploadedFile(
            file=self.file,
            field_name=self.field_name,
            name=self.file_name,
            content_type=self.content_type,
            size=file_size,
            charset=self.charset,
            content_type_extra=self.content_type_extra,
        )
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def load_handler(path, *args, **kwargs):
    """
    Given a dotted path to a handler class, return an instance of that
    handler, constructed with the given arguments.

    E.g.::
        >>> from django.http import HttpRequest
        >>> request = HttpRequest()
        >>> load_handler('django.core.files.uploadhandler.TemporaryFileUploadHandler', request)
        <TemporaryFileUploadHandler object at 0x...>
    """
    return import_string(path)(*args, **kwargs)
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/files/utils.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
class FileProxyMixin:
    """
    A mixin class used to forward file methods to an underlying file
    object. The internal file object has to be called "file"::

        class FileProxy(FileProxyMixin):
            def __init__(self, file):
                self.file = file
    """

    # Each property forwards the attribute lookup to self.file at access
    # time, so the proxy keeps working even if self.file is swapped out.
    encoding = property(lambda self: self.file.encoding)
    fileno = property(lambda self: self.file.fileno)
    flush = property(lambda self: self.file.flush)
    isatty = property(lambda self: self.file.isatty)
    newlines = property(lambda self: self.file.newlines)
    read = property(lambda self: self.file.read)
    readinto = property(lambda self: self.file.readinto)
    readline = property(lambda self: self.file.readline)
    readlines = property(lambda self: self.file.readlines)
    seek = property(lambda self: self.file.seek)
    tell = property(lambda self: self.file.tell)
    truncate = property(lambda self: self.file.truncate)
    write = property(lambda self: self.file.write)
    writelines = property(lambda self: self.file.writelines)

    @property
    def closed(self):
        # Treat a missing underlying file the same as a closed one.
        return not self.file or self.file.closed

    def readable(self):
        if self.closed:
            return False
        if hasattr(self.file, 'readable'):
            return self.file.readable()
        return True

    def writable(self):
        if self.closed:
            return False
        if hasattr(self.file, 'writable'):
            return self.file.writable()
        # Fall back to inspecting the open mode for older file objects.
        return 'w' in getattr(self.file, 'mode', '')

    def seekable(self):
        if self.closed:
            return False
        if hasattr(self.file, 'seekable'):
            return self.file.seekable()
        return True

    def __iter__(self):
        return iter(self.file)
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/handlers/base.py
ADDED
|
@@ -0,0 +1,171 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import types
|
| 3 |
+
|
| 4 |
+
from django.conf import settings
|
| 5 |
+
from django.core.exceptions import ImproperlyConfigured, MiddlewareNotUsed
|
| 6 |
+
from django.db import connections, transaction
|
| 7 |
+
from django.urls import get_resolver, set_urlconf
|
| 8 |
+
from django.utils.module_loading import import_string
|
| 9 |
+
|
| 10 |
+
from .exception import convert_exception_to_response, get_exception_response
|
| 11 |
+
|
| 12 |
+
logger = logging.getLogger('django.request')
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class BaseHandler:
    """Base class for Django's request handlers: builds the middleware chain
    and dispatches a request through it to produce a response."""
    # Middleware hook lists; populated by load_middleware().
    _request_middleware = None
    _view_middleware = None
    _template_response_middleware = None
    _response_middleware = None
    _exception_middleware = None
    # The fully built request->response callable; also serves as the
    # "initialization complete" flag (None until load_middleware() finishes).
    _middleware_chain = None
|
| 22 |
+
|
| 23 |
+
def load_middleware(self):
    """
    Populate middleware lists from settings.MIDDLEWARE.

    Must be called after the environment is fixed (see __call__ in subclasses).
    """
    self._request_middleware = []
    self._view_middleware = []
    self._template_response_middleware = []
    self._response_middleware = []
    self._exception_middleware = []

    # Build the chain inside-out: start with the view-resolving handler and
    # wrap each middleware around it, iterating settings.MIDDLEWARE in
    # reverse so the first-listed middleware ends up outermost.
    handler = convert_exception_to_response(self._get_response)
    for middleware_path in reversed(settings.MIDDLEWARE):
        middleware = import_string(middleware_path)
        try:
            mw_instance = middleware(handler)
        except MiddlewareNotUsed as exc:
            # A middleware may opt out of the chain entirely.
            if settings.DEBUG:
                if str(exc):
                    logger.debug('MiddlewareNotUsed(%r): %s', middleware_path, exc)
                else:
                    logger.debug('MiddlewareNotUsed: %r', middleware_path)
            continue

        if mw_instance is None:
            raise ImproperlyConfigured(
                'Middleware factory %s returned None.' % middleware_path
            )

        # Collect the optional hooks. process_view is inserted at the front
        # so hooks run in settings order despite the reversed iteration.
        if hasattr(mw_instance, 'process_view'):
            self._view_middleware.insert(0, mw_instance.process_view)
        if hasattr(mw_instance, 'process_template_response'):
            self._template_response_middleware.append(mw_instance.process_template_response)
        if hasattr(mw_instance, 'process_exception'):
            self._exception_middleware.append(mw_instance.process_exception)

        handler = convert_exception_to_response(mw_instance)

    # We only assign to this when initialization is complete as it is used
    # as a flag for initialization being complete.
    self._middleware_chain = handler
|
| 65 |
+
|
| 66 |
+
def make_view_atomic(self, view):
    """Wrap ``view`` in transaction.atomic() for each database configured
    with ATOMIC_REQUESTS, unless the view opted that alias out via
    ``_non_atomic_requests``."""
    non_atomic_requests = getattr(view, '_non_atomic_requests', set())
    for db in connections.all():
        if db.settings_dict['ATOMIC_REQUESTS'] and db.alias not in non_atomic_requests:
            view = transaction.atomic(using=db.alias)(view)
    return view
|
| 72 |
+
|
| 73 |
+
def get_exception_response(self, request, resolver, status_code, exception):
    # Delegate to the module-level get_exception_response() helper,
    # passing this handler's class as the final argument.
    return get_exception_response(request, resolver, status_code, exception, self.__class__)
|
| 75 |
+
|
| 76 |
+
def get_response(self, request):
    """Return an HttpResponse object for the given HttpRequest."""
    # Setup default url resolver for this thread.
    set_urlconf(settings.ROOT_URLCONF)

    # Run the request through the full middleware chain.
    response = self._middleware_chain(request)

    # Ensure the request is closed along with the response.
    response._closable_objects.append(request)

    # If the exception handler returns a TemplateResponse that has not
    # been rendered, force it to be rendered.
    if not getattr(response, 'is_rendered', True) and callable(getattr(response, 'render', None)):
        response = response.render()

    if response.status_code == 404:
        logger.warning(
            'Not Found: %s', request.path,
            extra={'status_code': 404, 'request': request},
        )

    return response
|
| 97 |
+
|
| 98 |
+
    def _get_response(self, request):
        """
        Resolve and call the view, then apply view, exception, and
        template_response middleware. This method is everything that happens
        inside the request/response middleware.

        Raises ValueError when the view (or a template-response middleware)
        returns None instead of an HttpResponse.
        """
        response = None

        # A middleware may set request.urlconf to install a per-request
        # URLconf; honor it for both thread-local state and resolving.
        if hasattr(request, 'urlconf'):
            urlconf = request.urlconf
            set_urlconf(urlconf)
            resolver = get_resolver(urlconf)
        else:
            resolver = get_resolver()

        resolver_match = resolver.resolve(request.path_info)
        callback, callback_args, callback_kwargs = resolver_match
        # Expose the match so views/middleware can introspect routing.
        request.resolver_match = resolver_match

        # Apply view middleware
        for middleware_method in self._view_middleware:
            response = middleware_method(request, callback, callback_args, callback_kwargs)
            if response:
                # A view middleware short-circuited the request.
                break

        if response is None:
            # No middleware answered: call the view itself, wrapped in
            # transaction.atomic() where ATOMIC_REQUESTS applies.
            wrapped_callback = self.make_view_atomic(callback)
            try:
                response = wrapped_callback(request, *callback_args, **callback_kwargs)
            except Exception as e:
                response = self.process_exception_by_middleware(e, request)

        # Complain if the view returned None (a common error).
        if response is None:
            if isinstance(callback, types.FunctionType):  # FBV
                view_name = callback.__name__
            else:  # CBV
                view_name = callback.__class__.__name__ + '.__call__'

            raise ValueError(
                "The view %s.%s didn't return an HttpResponse object. It "
                "returned None instead." % (callback.__module__, view_name)
            )

        # If the response supports deferred rendering, apply template
        # response middleware and then render the response
        elif hasattr(response, 'render') and callable(response.render):
            for middleware_method in self._template_response_middleware:
                response = middleware_method(request, response)
                # Complain if the template response middleware returned None (a common error).
                if response is None:
                    raise ValueError(
                        "%s.process_template_response didn't return an "
                        "HttpResponse object. It returned None instead."
                        % (middleware_method.__self__.__class__.__name__)
                    )

            try:
                response = response.render()
            except Exception as e:
                # Rendering failures also go through the exception middleware.
                response = self.process_exception_by_middleware(e, request)

        return response
|
| 161 |
+
|
| 162 |
+
def process_exception_by_middleware(self, exception, request):
|
| 163 |
+
"""
|
| 164 |
+
Pass the exception to the exception middleware. If no middleware
|
| 165 |
+
return a response for this exception, raise it.
|
| 166 |
+
"""
|
| 167 |
+
for middleware_method in self._exception_middleware:
|
| 168 |
+
response = middleware_method(request, exception)
|
| 169 |
+
if response:
|
| 170 |
+
return response
|
| 171 |
+
raise
|
A-news-Agrregation-system-master/myvenv/lib64/python3.6/site-packages/django/core/handlers/exception.py
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import sys
|
| 3 |
+
from functools import wraps
|
| 4 |
+
|
| 5 |
+
from django.conf import settings
|
| 6 |
+
from django.core import signals
|
| 7 |
+
from django.core.exceptions import (
|
| 8 |
+
PermissionDenied, RequestDataTooBig, SuspiciousOperation,
|
| 9 |
+
TooManyFieldsSent,
|
| 10 |
+
)
|
| 11 |
+
from django.http import Http404
|
| 12 |
+
from django.http.multipartparser import MultiPartParserError
|
| 13 |
+
from django.urls import get_resolver, get_urlconf
|
| 14 |
+
from django.views import debug
|
| 15 |
+
|
| 16 |
+
logger = logging.getLogger('django.request')
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def convert_exception_to_response(get_response):
    """
    Wrap the given get_response callable so that any exception it raises is
    converted into a response.

    Known 4xx exceptions (Http404, PermissionDenied, MultiPartParserError,
    SuspiciousOperation) become their matching error responses; everything
    else becomes a 500. Applied automatically to every middleware so that no
    middleware leaks an exception and the next middleware in the stack can
    rely on receiving a response rather than an exception.
    """
    @wraps(get_response)
    def inner(request):
        try:
            return get_response(request)
        except Exception as exc:
            return response_for_exception(request, exc)
    return inner
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def response_for_exception(request, exc):
    """
    Map a caught exception to the HTTP response Django should return.

    Known 4xx exception types (Http404, PermissionDenied,
    MultiPartParserError, SuspiciousOperation) get their matching error
    response; SystemExit is re-raised; anything else becomes a 500. The
    isinstance checks are ordered: subclasses of SuspiciousOperation must be
    tested before the generic 500 fallback.
    """
    if isinstance(exc, Http404):
        if settings.DEBUG:
            # Developer-facing "URL patterns tried" page.
            response = debug.technical_404_response(request, exc)
        else:
            response = get_exception_response(request, get_resolver(get_urlconf()), 404, exc)

    elif isinstance(exc, PermissionDenied):
        logger.warning(
            'Forbidden (Permission denied): %s', request.path,
            extra={'status_code': 403, 'request': request},
        )
        response = get_exception_response(request, get_resolver(get_urlconf()), 403, exc)

    elif isinstance(exc, MultiPartParserError):
        logger.warning(
            'Bad request (Unable to parse request body): %s', request.path,
            extra={'status_code': 400, 'request': request},
        )
        response = get_exception_response(request, get_resolver(get_urlconf()), 400, exc)

    elif isinstance(exc, SuspiciousOperation):
        if isinstance(exc, (RequestDataTooBig, TooManyFieldsSent)):
            # POST data can't be accessed again, otherwise the original
            # exception would be raised.
            request._mark_post_parse_error()

        # The request logger receives events for any problematic request
        # The security logger receives events for all SuspiciousOperations
        security_logger = logging.getLogger('django.security.%s' % exc.__class__.__name__)
        security_logger.error(
            str(exc),
            extra={'status_code': 400, 'request': request},
        )
        if settings.DEBUG:
            response = debug.technical_500_response(request, *sys.exc_info(), status_code=400)
        else:
            response = get_exception_response(request, get_resolver(get_urlconf()), 400, exc)

    elif isinstance(exc, SystemExit):
        # Allow sys.exit() to actually exit. See tickets #1023 and #4701
        raise

    else:
        signals.got_request_exception.send(sender=None, request=request)
        # Bare access to sys.exc_info() works because this function is called
        # from inside an except block (see convert_exception_to_response).
        response = handle_uncaught_exception(request, get_resolver(get_urlconf()), sys.exc_info())

    # Force a TemplateResponse to be rendered.
    if not getattr(response, 'is_rendered', True) and callable(getattr(response, 'render', None)):
        response = response.render()

    return response
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def get_exception_response(request, resolver, status_code, exception, sender=None):
    """
    Build the error response for *status_code* by invoking the URLconf's
    configured error handler with the triggering *exception*.

    If the error handler itself raises, fire got_request_exception and fall
    back to the uncaught-exception (500) path.
    """
    try:
        handler, handler_kwargs = resolver.resolve_error_handler(status_code)
        kwargs = dict(handler_kwargs, exception=exception)
        return handler(request, **kwargs)
    except Exception:
        signals.got_request_exception.send(sender=sender, request=request)
        return handle_uncaught_exception(request, resolver, sys.exc_info())
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def handle_uncaught_exception(request, resolver, exc_info):
    """
    Turn an otherwise-unhandled exception into an HTTP 500 response.

    Re-raises when DEBUG_PROPAGATE_EXCEPTIONS is set, shows the technical
    debug page under DEBUG, and otherwise delegates to the URLconf's 500
    error handler.
    """
    if settings.DEBUG_PROPAGATE_EXCEPTIONS:
        # Let test runners and callers see the original traceback.
        raise

    logger.error(
        'Internal Server Error: %s', request.path,
        exc_info=exc_info,
        extra={'status_code': 500, 'request': request},
    )

    if settings.DEBUG:
        return debug.technical_500_response(request, *exc_info)

    # Return an HttpResponse that displays a friendly error message.
    error_view, view_kwargs = resolver.resolve_error_handler(500)
    return error_view(request, **view_kwargs)
|