docker setup
This commit is contained in:
@ -0,0 +1,13 @@
|
||||
from django.apps import AppConfig
|
||||
from django.contrib.staticfiles.checks import check_finders
|
||||
from django.core import checks
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
|
||||
class StaticFilesConfig(AppConfig):
    """AppConfig for django.contrib.staticfiles."""

    name = "django.contrib.staticfiles"
    verbose_name = _("Static Files")
    # Glob-style patterns that collectstatic skips by default.
    ignore_patterns = ["CVS", ".*", "*~"]

    def ready(self):
        # Hook the staticfiles configuration check into Django's system
        # check framework when the app registry is fully populated.
        checks.register(check_finders, checks.Tags.staticfiles)
|
@ -0,0 +1,14 @@
|
||||
from django.contrib.staticfiles.finders import get_finders
|
||||
|
||||
|
||||
def check_finders(app_configs=None, **kwargs):
    """Run the check() hook of every configured staticfiles finder.

    Finders whose check() raises NotImplementedError (the BaseFinder
    default) are skipped; all other finder errors are accumulated and
    returned for the checks framework.
    """
    collected = []
    for current_finder in get_finders():
        try:
            finder_result = current_finder.check()
        except NotImplementedError:
            # This finder doesn't implement the optional hook.
            continue
        collected.extend(finder_result)
    return collected
|
@ -0,0 +1,326 @@
|
||||
import functools
|
||||
import os
|
||||
|
||||
from django.apps import apps
|
||||
from django.conf import settings
|
||||
from django.contrib.staticfiles import utils
|
||||
from django.core.checks import Error, Warning
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.core.files.storage import FileSystemStorage, Storage, default_storage
|
||||
from django.utils._os import safe_join
|
||||
from django.utils.functional import LazyObject, empty
|
||||
from django.utils.module_loading import import_string
|
||||
|
||||
# To keep track on which directories the finder has searched the static files.
|
||||
searched_locations = []
|
||||
|
||||
|
||||
class BaseFinder:
    """
    A base file finder to be used for custom staticfiles finder classes.
    """

    def check(self, **kwargs):
        # Optional system-check hook; concrete finders override this to
        # validate their configuration. The base class only signals that
        # no such hook exists.
        message = (
            "subclasses may provide a check() method to verify the finder is "
            "configured correctly."
        )
        raise NotImplementedError(message)

    def find(self, path, all=False):
        """
        Given a relative file path, find an absolute file path.

        If the ``all`` parameter is False (default) return only the first found
        file path; if True, return a list of all found files paths.
        """
        message = "subclasses of BaseFinder must provide a find() method"
        raise NotImplementedError(message)

    def list(self, ignore_patterns):
        """
        Given an optional list of paths to ignore, return a two item iterable
        consisting of the relative path and storage instance.
        """
        message = "subclasses of BaseFinder must provide a list() method"
        raise NotImplementedError(message)
|
||||
|
||||
|
||||
class FileSystemFinder(BaseFinder):
    """
    A static files finder that uses the ``STATICFILES_DIRS`` setting
    to locate files.
    """

    def __init__(self, app_names=None, *args, **kwargs):
        # List of (prefix, root) locations with static files.
        self.locations = []
        # Maps dir paths to an appropriate storage instance.
        self.storages = {}
        for root in settings.STATICFILES_DIRS:
            # Entries may be plain paths or (prefix, path) two-tuples.
            if isinstance(root, (list, tuple)):
                prefix, root = root
            else:
                prefix = ""
            # Deduplicate while preserving the configured order.
            if (prefix, root) not in self.locations:
                self.locations.append((prefix, root))
        for prefix, root in self.locations:
            filesystem_storage = FileSystemStorage(location=root)
            # The prefix is read back later (e.g. by collectstatic) to
            # build the prefixed destination path.
            filesystem_storage.prefix = prefix
            self.storages[root] = filesystem_storage
        super().__init__(*args, **kwargs)

    def check(self, **kwargs):
        """Validate the STATICFILES_DIRS setting for common mistakes."""
        errors = []
        if not isinstance(settings.STATICFILES_DIRS, (list, tuple)):
            errors.append(
                Error(
                    "The STATICFILES_DIRS setting is not a tuple or list.",
                    hint="Perhaps you forgot a trailing comma?",
                    id="staticfiles.E001",
                )
            )
            # Bail out early: the remaining checks require an iterable.
            return errors
        for root in settings.STATICFILES_DIRS:
            if isinstance(root, (list, tuple)):
                prefix, root = root
                if prefix.endswith("/"):
                    errors.append(
                        Error(
                            "The prefix %r in the STATICFILES_DIRS setting must "
                            "not end with a slash." % prefix,
                            id="staticfiles.E003",
                        )
                    )
            if settings.STATIC_ROOT and os.path.abspath(
                settings.STATIC_ROOT
            ) == os.path.abspath(root):
                errors.append(
                    Error(
                        "The STATICFILES_DIRS setting should not contain the "
                        "STATIC_ROOT setting.",
                        id="staticfiles.E002",
                    )
                )
            if not os.path.isdir(root):
                errors.append(
                    Warning(
                        f"The directory '{root}' in the STATICFILES_DIRS setting "
                        f"does not exist.",
                        id="staticfiles.W004",
                    )
                )
        return errors

    def find(self, path, all=False):
        """
        Look for files in the extra locations as defined in STATICFILES_DIRS.
        """
        matches = []
        for prefix, root in self.locations:
            # Record where we looked, for debugging (e.g. findstatic -v2).
            if root not in searched_locations:
                searched_locations.append(root)
            matched_path = self.find_location(root, path, prefix)
            if matched_path:
                if not all:
                    return matched_path
                matches.append(matched_path)
        return matches

    def find_location(self, root, path, prefix=None):
        """
        Find a requested static file in a location and return the found
        absolute path (or ``None`` if no match).
        """
        if prefix:
            prefix = "%s%s" % (prefix, os.sep)
            # A path outside this prefix can never live under this root.
            if not path.startswith(prefix):
                return None
            path = path[len(prefix) :]
        # safe_join() prevents directory-traversal outside of root.
        path = safe_join(root, path)
        if os.path.exists(path):
            return path

    def list(self, ignore_patterns):
        """
        List all files in all locations.
        """
        for prefix, root in self.locations:
            # Skip nonexistent directories.
            if os.path.isdir(root):
                storage = self.storages[root]
                for path in utils.get_files(storage, ignore_patterns):
                    yield path, storage
|
||||
|
||||
|
||||
class AppDirectoriesFinder(BaseFinder):
    """
    A static files finder that looks in the directory of each app as
    specified in the source_dir attribute.
    """

    # Storage class used for each app's static directory.
    storage_class = FileSystemStorage
    # Subdirectory of each installed app that holds its static files.
    source_dir = "static"

    def __init__(self, app_names=None, *args, **kwargs):
        # The list of apps that are handled (names, in discovery order).
        self.apps = []
        # Mapping of app names to storage instances.
        self.storages = {}
        app_configs = apps.get_app_configs()
        if app_names:
            # Restrict to an explicit subset of installed apps.
            app_names = set(app_names)
            app_configs = [ac for ac in app_configs if ac.name in app_names]
        for app_config in app_configs:
            app_storage = self.storage_class(
                os.path.join(app_config.path, self.source_dir)
            )
            # Only track apps that actually ship a static directory.
            if os.path.isdir(app_storage.location):
                self.storages[app_config.name] = app_storage
                if app_config.name not in self.apps:
                    self.apps.append(app_config.name)
        super().__init__(*args, **kwargs)

    def list(self, ignore_patterns):
        """
        List all files in all app storages.
        """
        for storage in self.storages.values():
            if storage.exists(""):  # check if storage location exists
                for path in utils.get_files(storage, ignore_patterns):
                    yield path, storage

    def find(self, path, all=False):
        """
        Look for files in the app directories.
        """
        matches = []
        for app in self.apps:
            app_location = self.storages[app].location
            # Record where we looked, for debugging output.
            if app_location not in searched_locations:
                searched_locations.append(app_location)
            match = self.find_in_app(app, path)
            if match:
                if not all:
                    return match
                matches.append(match)
        return matches

    def find_in_app(self, app, path):
        """
        Find a requested static file in an app's static locations.
        """
        storage = self.storages.get(app)
        # Only try to find a file if the source dir actually exists.
        if storage and storage.exists(path):
            matched_path = storage.path(path)
            if matched_path:
                return matched_path
|
||||
|
||||
|
||||
class BaseStorageFinder(BaseFinder):
    """
    A base static files finder to be used to extended
    with an own storage class.
    """

    # Storage class (or instance) that subclasses must provide.
    storage = None

    def __init__(self, storage=None, *args, **kwargs):
        if storage is not None:
            self.storage = storage
        if self.storage is None:
            raise ImproperlyConfigured(
                "The staticfiles storage finder %r "
                "doesn't have a storage class "
                "assigned." % self.__class__
            )
        # Make sure we have a storage instance here.
        if not isinstance(self.storage, (Storage, LazyObject)):
            self.storage = self.storage()
        super().__init__(*args, **kwargs)

    def find(self, path, all=False):
        """
        Look for files in the default file storage, if it's local.
        """
        try:
            # path("") raises NotImplementedError on non-local storages.
            self.storage.path("")
        except NotImplementedError:
            pass
        else:
            # Record where we looked, for debugging output.
            if self.storage.location not in searched_locations:
                searched_locations.append(self.storage.location)
            if self.storage.exists(path):
                match = self.storage.path(path)
                if all:
                    match = [match]
                return match
        return []

    def list(self, ignore_patterns):
        """
        List all files of the storage.
        """
        for path in utils.get_files(self.storage, ignore_patterns):
            yield path, self.storage
|
||||
|
||||
|
||||
class DefaultStorageFinder(BaseStorageFinder):
    """
    A static files finder that uses the default storage backend.
    """

    storage = default_storage

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # ``empty`` is the LazyObject sentinel; a missing or falsy
        # base_location means the backend has no usable location.
        base_location = getattr(self.storage, "base_location", empty)
        if not base_location:
            raise ImproperlyConfigured(
                "The storage backend of the "
                "staticfiles finder %r doesn't have "
                "a valid location." % self.__class__
            )
|
||||
|
||||
|
||||
def find(path, all=False):
    """
    Find a static file with the given path using all enabled finders.

    If ``all`` is ``False`` (default), return the first matching
    absolute path (or ``None`` if no match). Otherwise return a list.
    """
    # Reset the module-level record of searched directories in place so
    # existing references to the list remain valid.
    searched_locations[:] = []
    collected = []
    for finder in get_finders():
        found = finder.find(path, all=all)
        # Short-circuit on the first truthy hit when one result suffices.
        if found and not all:
            return found
        if isinstance(found, (list, tuple)):
            collected.extend(found)
        else:
            collected.append(found)
    if collected:
        return collected
    # Nothing matched in any finder.
    return [] if all else None
|
||||
|
||||
|
||||
def get_finders():
    """Yield an instance of every finder named in STATICFILES_FINDERS."""
    # Generator function (not a returned genexp) so the settings lookup
    # stays lazy until iteration starts.
    yield from map(get_finder, settings.STATICFILES_FINDERS)
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=None)
def get_finder(import_path):
    """
    Import the staticfiles finder class described by import_path, where
    import_path is the full Python path to the class.

    Results are cached, so repeated lookups of the same path return the
    same finder instance.
    """
    finder_cls = import_string(import_path)
    if issubclass(finder_cls, BaseFinder):
        return finder_cls()
    raise ImproperlyConfigured(
        'Finder "%s" is not a subclass of "%s"' % (finder_cls, BaseFinder)
    )
|
@ -0,0 +1,115 @@
|
||||
from urllib.parse import urlparse
|
||||
from urllib.request import url2pathname
|
||||
|
||||
from asgiref.sync import sync_to_async
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.staticfiles import utils
|
||||
from django.contrib.staticfiles.views import serve
|
||||
from django.core.handlers.asgi import ASGIHandler
|
||||
from django.core.handlers.exception import response_for_exception
|
||||
from django.core.handlers.wsgi import WSGIHandler, get_path_info
|
||||
from django.http import Http404
|
||||
|
||||
|
||||
class StaticFilesHandlerMixin:
    """
    Common methods used by WSGI and ASGI handlers.
    """

    # May be used to differentiate between handler types (e.g. in a
    # request_finished signal)
    handles_files = True

    def load_middleware(self):
        # Middleware are already loaded for self.application; no need to reload
        # them for self.
        pass

    def get_base_url(self):
        """Return STATIC_URL after validating the staticfiles settings."""
        utils.check_settings()
        return settings.STATIC_URL

    def _should_handle(self, path):
        """
        Check if the path should be handled. Ignore the path if:
        * the host is provided as part of the base_url
        * the request's path isn't under the media path (or equal)
        """
        # self.base_url is a urlparse() result: index 1 is the netloc,
        # index 2 is the path component.
        return path.startswith(self.base_url[2]) and not self.base_url[1]

    def file_path(self, url):
        """
        Return the relative path to the media file on disk for the given URL.
        """
        relative_url = url[len(self.base_url[2]) :]
        return url2pathname(relative_url)

    def serve(self, request):
        """Serve the request path."""
        # insecure=True: this handler is only meant for development use.
        return serve(request, self.file_path(request.path), insecure=True)

    def get_response(self, request):
        """Serve the file, turning Http404 into a regular error response."""
        try:
            return self.serve(request)
        except Http404 as e:
            return response_for_exception(request, e)

    async def get_response_async(self, request):
        """Async variant of get_response(); serving itself stays sync."""
        try:
            return await sync_to_async(self.serve, thread_sensitive=False)(request)
        except Http404 as e:
            return await sync_to_async(response_for_exception, thread_sensitive=False)(
                request, e
            )
|
||||
|
||||
|
||||
class StaticFilesHandler(StaticFilesHandlerMixin, WSGIHandler):
    """
    WSGI middleware that intercepts calls to the static files directory, as
    defined by the STATIC_URL setting, and serves those files.
    """

    def __init__(self, application):
        # The wrapped WSGI application handles all non-static requests.
        self.application = application
        self.base_url = urlparse(self.get_base_url())
        super().__init__()

    def __call__(self, environ, start_response):
        # Serve the request ourselves only when it targets STATIC_URL;
        # otherwise hand it to the wrapped application untouched.
        request_path = get_path_info(environ)
        if self._should_handle(request_path):
            return super().__call__(environ, start_response)
        return self.application(environ, start_response)
|
||||
|
||||
|
||||
class ASGIStaticFilesHandler(StaticFilesHandlerMixin, ASGIHandler):
    """
    ASGI application which wraps another and intercepts requests for static
    files, passing them off to Django's static file serving.
    """

    def __init__(self, application):
        # The wrapped ASGI application handles all non-static requests.
        self.application = application
        self.base_url = urlparse(self.get_base_url())

    async def __call__(self, scope, receive, send):
        # Only even look at HTTP requests
        if scope["type"] == "http" and self._should_handle(scope["path"]):
            # Serve static content
            # (the one thing super() doesn't do is __call__, apparently)
            return await super().__call__(scope, receive, send)
        # Hand off to the main app
        return await self.application(scope, receive, send)

    async def get_response_async(self, request):
        """Serve the file and adapt sync streaming content for ASGI."""
        response = await super().get_response_async(request)
        response._resource_closers.append(request.close)
        # FileResponse is not async compatible.
        if response.streaming and not response.is_async:
            _iterator = response.streaming_content

            async def awrapper():
                # Drain the sync iterator in a worker thread, then
                # re-yield the chunks asynchronously.
                for part in await sync_to_async(list)(_iterator):
                    yield part

            response.streaming_content = awrapper()
        return response
|
@ -0,0 +1,379 @@
|
||||
import os
|
||||
|
||||
from django.apps import apps
|
||||
from django.contrib.staticfiles.finders import get_finders
|
||||
from django.contrib.staticfiles.storage import staticfiles_storage
|
||||
from django.core.checks import Tags
|
||||
from django.core.files.storage import FileSystemStorage
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.management.color import no_style
|
||||
from django.utils.functional import cached_property
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """
    Copies or symlinks static files from different locations to the
    settings.STATIC_ROOT.
    """

    help = "Collect static files in a single location."
    # Only run the staticfiles system checks before executing.
    requires_system_checks = [Tags.staticfiles]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Bookkeeping for the final summary report.
        self.copied_files = []
        self.symlinked_files = []
        self.unmodified_files = []
        self.post_processed_files = []
        self.storage = staticfiles_storage
        self.style = no_style()

    @cached_property
    def local(self):
        """Whether the destination storage lives on the local filesystem."""
        try:
            # path() raises NotImplementedError for remote storages.
            self.storage.path("")
        except NotImplementedError:
            return False
        return True

    def add_arguments(self, parser):
        """Register the collectstatic command-line options."""
        parser.add_argument(
            "--noinput",
            "--no-input",
            action="store_false",
            dest="interactive",
            help="Do NOT prompt the user for input of any kind.",
        )
        parser.add_argument(
            "--no-post-process",
            action="store_false",
            dest="post_process",
            help="Do NOT post process collected files.",
        )
        parser.add_argument(
            "-i",
            "--ignore",
            action="append",
            default=[],
            dest="ignore_patterns",
            metavar="PATTERN",
            help="Ignore files or directories matching this glob-style "
            "pattern. Use multiple times to ignore more.",
        )
        parser.add_argument(
            "-n",
            "--dry-run",
            action="store_true",
            help="Do everything except modify the filesystem.",
        )
        parser.add_argument(
            "-c",
            "--clear",
            action="store_true",
            help="Clear the existing files using the storage "
            "before trying to copy or link the original file.",
        )
        parser.add_argument(
            "-l",
            "--link",
            action="store_true",
            help="Create a symbolic link to each file instead of copying.",
        )
        parser.add_argument(
            "--no-default-ignore",
            action="store_false",
            dest="use_default_ignore_patterns",
            help=(
                "Don't ignore the common private glob-style patterns (defaults to "
                "'CVS', '.*' and '*~')."
            ),
        )

    def set_options(self, **options):
        """
        Set instance variables based on an options dict
        """
        self.interactive = options["interactive"]
        self.verbosity = options["verbosity"]
        self.symlink = options["link"]
        self.clear = options["clear"]
        self.dry_run = options["dry_run"]
        ignore_patterns = options["ignore_patterns"]
        if options["use_default_ignore_patterns"]:
            ignore_patterns += apps.get_app_config("staticfiles").ignore_patterns
        # Normalize and deduplicate the patterns.
        self.ignore_patterns = list({os.path.normpath(p) for p in ignore_patterns})
        self.post_process = options["post_process"]

    def collect(self):
        """
        Perform the bulk of the work of collectstatic.

        Split off from handle() to facilitate testing.
        """
        if self.symlink and not self.local:
            raise CommandError("Can't symlink to a remote destination.")

        if self.clear:
            # "" means the storage root: wipe everything below it.
            self.clear_dir("")

        if self.symlink:
            handler = self.link_file
        else:
            handler = self.copy_file

        found_files = {}
        for finder in get_finders():
            for path, storage in finder.list(self.ignore_patterns):
                # Prefix the relative path if the source storage contains it
                if getattr(storage, "prefix", None):
                    prefixed_path = os.path.join(storage.prefix, path)
                else:
                    prefixed_path = path

                # First finder wins; later duplicates are only reported.
                if prefixed_path not in found_files:
                    found_files[prefixed_path] = (storage, path)
                    handler(path, prefixed_path, storage)
                else:
                    self.log(
                        "Found another file with the destination path '%s'. It "
                        "will be ignored since only the first encountered file "
                        "is collected. If this is not what you want, make sure "
                        "every static file has a unique path." % prefixed_path,
                        level=1,
                    )

        # Storage backends may define a post_process() method.
        if self.post_process and hasattr(self.storage, "post_process"):
            processor = self.storage.post_process(found_files, dry_run=self.dry_run)
            for original_path, processed_path, processed in processor:
                if isinstance(processed, Exception):
                    self.stderr.write("Post-processing '%s' failed!" % original_path)
                    # Add a blank line before the traceback, otherwise it's
                    # too easy to miss the relevant part of the error message.
                    self.stderr.write()
                    raise processed
                if processed:
                    self.log(
                        "Post-processed '%s' as '%s'" % (original_path, processed_path),
                        level=2,
                    )
                    self.post_processed_files.append(original_path)
                else:
                    self.log("Skipped post-processing '%s'" % original_path)

        return {
            "modified": self.copied_files + self.symlinked_files,
            "unmodified": self.unmodified_files,
            "post_processed": self.post_processed_files,
        }

    def handle(self, **options):
        """Prompt the user when destructive, run collect(), report results."""
        self.set_options(**options)
        message = ["\n"]
        if self.dry_run:
            message.append(
                "You have activated the --dry-run option so no files will be "
                "modified.\n\n"
            )

        message.append(
            "You have requested to collect static files at the destination\n"
            "location as specified in your settings"
        )

        if self.is_local_storage() and self.storage.location:
            destination_path = self.storage.location
            message.append(":\n\n %s\n\n" % destination_path)
            should_warn_user = self.storage.exists(destination_path) and any(
                self.storage.listdir(destination_path)
            )
        else:
            destination_path = None
            message.append(".\n\n")
            # Destination files existence not checked; play it safe and warn.
            should_warn_user = True

        if self.interactive and should_warn_user:
            if self.clear:
                message.append("This will DELETE ALL FILES in this location!\n")
            else:
                message.append("This will overwrite existing files!\n")

            message.append(
                "Are you sure you want to do this?\n\n"
                "Type 'yes' to continue, or 'no' to cancel: "
            )
            if input("".join(message)) != "yes":
                raise CommandError("Collecting static files cancelled.")

        collected = self.collect()

        if self.verbosity >= 1:
            modified_count = len(collected["modified"])
            unmodified_count = len(collected["unmodified"])
            post_processed_count = len(collected["post_processed"])
            return (
                "\n%(modified_count)s %(identifier)s %(action)s"
                "%(destination)s%(unmodified)s%(post_processed)s."
            ) % {
                "modified_count": modified_count,
                "identifier": "static file" + ("" if modified_count == 1 else "s"),
                "action": "symlinked" if self.symlink else "copied",
                "destination": (
                    " to '%s'" % destination_path if destination_path else ""
                ),
                "unmodified": (
                    ", %s unmodified" % unmodified_count
                    if collected["unmodified"]
                    else ""
                ),
                "post_processed": (
                    collected["post_processed"]
                    and ", %s post-processed" % post_processed_count
                    or ""
                ),
            }

    def log(self, msg, level=2):
        """
        Small log helper
        """
        # Only emit messages at or below the current verbosity.
        if self.verbosity >= level:
            self.stdout.write(msg)

    def is_local_storage(self):
        """Whether the destination storage is a FileSystemStorage."""
        return isinstance(self.storage, FileSystemStorage)

    def clear_dir(self, path):
        """
        Delete the given relative path using the destination storage backend.
        """
        if not self.storage.exists(path):
            return

        dirs, files = self.storage.listdir(path)
        for f in files:
            fpath = os.path.join(path, f)
            if self.dry_run:
                self.log("Pretending to delete '%s'" % fpath, level=1)
            else:
                self.log("Deleting '%s'" % fpath, level=1)
                try:
                    full_path = self.storage.path(fpath)
                except NotImplementedError:
                    # Remote storage: delete via the storage API.
                    self.storage.delete(fpath)
                else:
                    if not os.path.exists(full_path) and os.path.lexists(full_path):
                        # Delete broken symlinks
                        os.unlink(full_path)
                    else:
                        self.storage.delete(fpath)
        for d in dirs:
            # Recurse into subdirectories.
            self.clear_dir(os.path.join(path, d))

    def delete_file(self, path, prefixed_path, source_storage):
        """
        Check if the target file should be deleted if it already exists.
        """
        if self.storage.exists(prefixed_path):
            try:
                # When was the target file modified last time?
                target_last_modified = self.storage.get_modified_time(prefixed_path)
            except (OSError, NotImplementedError, AttributeError):
                # The storage doesn't support get_modified_time() or failed
                pass
            else:
                try:
                    # When was the source file modified last time?
                    source_last_modified = source_storage.get_modified_time(path)
                except (OSError, NotImplementedError, AttributeError):
                    pass
                else:
                    # The full path of the target file
                    if self.local:
                        full_path = self.storage.path(prefixed_path)
                        # If it's --link mode and the path isn't a link (i.e.
                        # the previous collectstatic wasn't with --link) or if
                        # it's non-link mode and the path is a link (i.e. the
                        # previous collectstatic was with --link), the old
                        # links/files must be deleted so it's not safe to skip
                        # unmodified files.
                        can_skip_unmodified_files = not (
                            self.symlink ^ os.path.islink(full_path)
                        )
                    else:
                        # In remote storages, skipping is only based on the
                        # modified times since symlinks aren't relevant.
                        can_skip_unmodified_files = True
                    # Avoid sub-second precision (see #14665, #19540)
                    file_is_unmodified = target_last_modified.replace(
                        microsecond=0
                    ) >= source_last_modified.replace(microsecond=0)
                    if file_is_unmodified and can_skip_unmodified_files:
                        if prefixed_path not in self.unmodified_files:
                            self.unmodified_files.append(prefixed_path)
                        self.log("Skipping '%s' (not modified)" % path)
                        return False
            # Then delete the existing file if really needed
            if self.dry_run:
                self.log("Pretending to delete '%s'" % path)
            else:
                self.log("Deleting '%s'" % path)
                self.storage.delete(prefixed_path)
        return True

    def link_file(self, path, prefixed_path, source_storage):
        """
        Attempt to link ``path``
        """
        # Skip this file if it was already copied earlier
        if prefixed_path in self.symlinked_files:
            return self.log("Skipping '%s' (already linked earlier)" % path)
        # Delete the target file if needed or break
        if not self.delete_file(path, prefixed_path, source_storage):
            return
        # The full path of the source file
        source_path = source_storage.path(path)
        # Finally link the file
        if self.dry_run:
            self.log("Pretending to link '%s'" % source_path, level=1)
        else:
            self.log("Linking '%s'" % source_path, level=2)
            full_path = self.storage.path(prefixed_path)
            os.makedirs(os.path.dirname(full_path), exist_ok=True)
            try:
                # Replace any stale link/file before creating the symlink.
                if os.path.lexists(full_path):
                    os.unlink(full_path)
                os.symlink(source_path, full_path)
            except NotImplementedError:
                import platform

                raise CommandError(
                    "Symlinking is not supported in this "
                    "platform (%s)." % platform.platform()
                )
            except OSError as e:
                raise CommandError(e)
        if prefixed_path not in self.symlinked_files:
            self.symlinked_files.append(prefixed_path)

    def copy_file(self, path, prefixed_path, source_storage):
        """
        Attempt to copy ``path`` with storage
        """
        # Skip this file if it was already copied earlier
        if prefixed_path in self.copied_files:
            return self.log("Skipping '%s' (already copied earlier)" % path)
        # Delete the target file if needed or break
        if not self.delete_file(path, prefixed_path, source_storage):
            return
        # The full path of the source file
        source_path = source_storage.path(path)
        # Finally start copying
        if self.dry_run:
            self.log("Pretending to copy '%s'" % source_path, level=1)
        else:
            self.log("Copying '%s'" % source_path, level=2)
            with source_storage.open(path) as source_file:
                self.storage.save(prefixed_path, source_file)
        self.copied_files.append(prefixed_path)
|
@ -0,0 +1,48 @@
|
||||
import os
|
||||
|
||||
from django.contrib.staticfiles import finders
|
||||
from django.core.management.base import LabelCommand
|
||||
|
||||
|
||||
class Command(LabelCommand):
    help = "Finds the absolute paths for the given static file(s)."
    label = "staticfile"

    def add_arguments(self, parser):
        super().add_arguments(parser)
        parser.add_argument(
            "--first",
            action="store_false",
            dest="all",
            help="Only return the first match for each static file.",
        )

    def handle_label(self, path, **options):
        """Report where the given static file was (or wasn't) found."""
        verbosity = options["verbosity"]
        result = finders.find(path, all=options["all"])
        if verbosity >= 2:
            searched_locations = (
                "\nLooking in the following locations:\n %s"
                % "\n ".join([str(loc) for loc in finders.searched_locations])
            )
        else:
            searched_locations = ""
        if result:
            if not isinstance(result, (list, tuple)):
                result = [result]
            # Resolve symlinks so the reported paths are canonical.
            result = (os.path.realpath(path) for path in result)
            if verbosity >= 1:
                file_list = "\n ".join(result)
                return "Found '%s' here:\n %s%s" % (
                    path,
                    file_list,
                    searched_locations,
                )
            else:
                return "\n".join(result)
        else:
            message = ["No matching file found for '%s'." % path]
            if verbosity >= 2:
                message.append(searched_locations)
            if verbosity >= 1:
                # Not found is reported on stderr, not returned.
                self.stderr.write("\n".join(message))
|
@ -0,0 +1,36 @@
|
||||
from django.conf import settings
|
||||
from django.contrib.staticfiles.handlers import StaticFilesHandler
|
||||
from django.core.management.commands.runserver import Command as RunserverCommand
|
||||
|
||||
|
||||
class Command(RunserverCommand):
    help = (
        "Starts a lightweight web server for development and also serves static files."
    )

    def add_arguments(self, parser):
        super().add_arguments(parser)
        parser.add_argument(
            "--nostatic",
            action="store_false",
            dest="use_static_handler",
            help="Tells Django to NOT automatically serve static files at STATIC_URL.",
        )
        parser.add_argument(
            "--insecure",
            action="store_true",
            dest="insecure_serving",
            help="Allows serving static files even if DEBUG is False.",
        )

    def get_handler(self, *args, **options):
        """
        Return the static files serving handler wrapping the default handler,
        if static files should be served. Otherwise return the default handler.
        """
        handler = super().get_handler(*args, **options)
        use_static_handler = options["use_static_handler"]
        insecure_serving = options["insecure_serving"]
        # Static serving is on by default under DEBUG; --insecure opts in
        # when DEBUG is False and --nostatic opts out entirely.
        if use_static_handler and (settings.DEBUG or insecure_serving):
            return StaticFilesHandler(handler)
        return handler
|
@ -0,0 +1,540 @@
|
||||
import json
|
||||
import os
|
||||
import posixpath
|
||||
import re
|
||||
from urllib.parse import unquote, urldefrag, urlsplit, urlunsplit
|
||||
|
||||
from django.conf import STATICFILES_STORAGE_ALIAS, settings
|
||||
from django.contrib.staticfiles.utils import check_settings, matches_patterns
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.core.files.base import ContentFile
|
||||
from django.core.files.storage import FileSystemStorage, storages
|
||||
from django.utils.crypto import md5
|
||||
from django.utils.functional import LazyObject
|
||||
|
||||
|
||||
class StaticFilesStorage(FileSystemStorage):
    """
    Standard file system storage for static files.

    The defaults for ``location`` and ``base_url`` are
    ``STATIC_ROOT`` and ``STATIC_URL``.
    """

    def __init__(self, location=None, base_url=None, *args, **kwargs):
        location = settings.STATIC_ROOT if location is None else location
        base_url = settings.STATIC_URL if base_url is None else base_url
        check_settings(base_url)
        super().__init__(location, base_url, *args, **kwargs)
        # FileSystemStorage falls back to MEDIA_ROOT when location is empty;
        # undo that so an unset STATIC_ROOT stays unset.
        if not location:
            self.base_location = None
            self.location = None

    def path(self, name):
        """Return the absolute filesystem path; require STATIC_ROOT to be set."""
        if self.location:
            return super().path(name)
        raise ImproperlyConfigured(
            "You're using the staticfiles app "
            "without having set the STATIC_ROOT "
            "setting to a filesystem path."
        )
|
||||
|
||||
|
||||
class HashedFilesMixin:
    """
    Storage mixin that appends a content hash to static file names and
    rewrites references inside CSS/JS files so they point at the hashed
    names (cache-busting).
    """

    # Fallback substitution template used when a pattern supplies none.
    default_template = """url("%(url)s")"""
    # Cap on substitution rounds in post_process(); files referencing each
    # other may need several passes before all hashes stabilize.
    max_post_process_passes = 5
    # When True, ES module import/export specifiers in *.js are rewritten too.
    support_js_module_import_aggregation = False
    _js_module_import_aggregation_patterns = (
        "*.js",
        (
            (
                (
                    r"""(?P<matched>import(?s:(?P<import>[\s\{].*?))"""
                    r"""\s*from\s*['"](?P<url>[\.\/].*?)["']\s*;)"""
                ),
                """import%(import)s from "%(url)s";""",
            ),
            (
                (
                    r"""(?P<matched>export(?s:(?P<exports>[\s\{].*?))"""
                    r"""\s*from\s*["'](?P<url>[\.\/].*?)["']\s*;)"""
                ),
                """export%(exports)s from "%(url)s";""",
            ),
            (
                r"""(?P<matched>import\s*['"](?P<url>[\.\/].*?)["']\s*;)""",
                """import"%(url)s";""",
            ),
            (
                r"""(?P<matched>import\(["'](?P<url>.*?)["']\))""",
                """import("%(url)s")""",
            ),
        ),
    )
    # (glob, patterns) pairs; each inner pattern is either a bare regex
    # (paired with default_template in __init__) or a (regex, template) pair.
    patterns = (
        (
            "*.css",
            (
                r"""(?P<matched>url\(['"]{0,1}\s*(?P<url>.*?)["']{0,1}\))""",
                (
                    r"""(?P<matched>@import\s*["']\s*(?P<url>.*?)["'])""",
                    """@import url("%(url)s")""",
                ),
                (
                    (
                        r"(?m)(?P<matched>)^(/\*#[ \t]"
                        r"(?-i:sourceMappingURL)=(?P<url>.*)[ \t]*\*/)$"
                    ),
                    "/*# sourceMappingURL=%(url)s */",
                ),
            ),
        ),
        (
            "*.js",
            (
                (
                    r"(?m)(?P<matched>)^(//# (?-i:sourceMappingURL)=(?P<url>.*))$",
                    "//# sourceMappingURL=%(url)s",
                ),
            ),
        ),
    )
    # Whether intermediate hashed files are kept on disk after processing.
    keep_intermediate_files = True

    def __init__(self, *args, **kwargs):
        """Compile the substitution patterns and set up the hash cache."""
        if self.support_js_module_import_aggregation:
            self.patterns += (self._js_module_import_aggregation_patterns,)
        super().__init__(*args, **kwargs)
        # Maps file-glob extension -> list of (compiled regex, template).
        self._patterns = {}
        # Maps hash_key(original name) -> hashed name.
        self.hashed_files = {}
        for extension, patterns in self.patterns:
            for pattern in patterns:
                if isinstance(pattern, (tuple, list)):
                    pattern, template = pattern
                else:
                    # Bare regex: pair it with the default template.
                    template = self.default_template
                compiled = re.compile(pattern, re.IGNORECASE)
                self._patterns.setdefault(extension, []).append((compiled, template))

    def file_hash(self, name, content=None):
        """
        Return a hash of the file with the given name and optional content.
        """
        if content is None:
            return None
        hasher = md5(usedforsecurity=False)
        for chunk in content.chunks():
            hasher.update(chunk)
        # Only the first 12 hex digits are used in the hashed filename.
        return hasher.hexdigest()[:12]

    def hashed_name(self, name, content=None, filename=None):
        """Return ``name`` with the content hash inserted before the extension."""
        # `filename` is the name of file to hash if `content` isn't given.
        # `name` is the base name to construct the new hashed filename from.
        parsed_name = urlsplit(unquote(name))
        clean_name = parsed_name.path.strip()
        filename = (filename and urlsplit(unquote(filename)).path.strip()) or clean_name
        opened = content is None
        if opened:
            if not self.exists(filename):
                raise ValueError(
                    "The file '%s' could not be found with %r." % (filename, self)
                )
            try:
                content = self.open(filename)
            except OSError:
                # Handle directory paths and fragments
                return name
        try:
            file_hash = self.file_hash(clean_name, content)
        finally:
            # Only close the file if this method opened it.
            if opened:
                content.close()
        path, filename = os.path.split(clean_name)
        root, ext = os.path.splitext(filename)
        file_hash = (".%s" % file_hash) if file_hash else ""
        hashed_name = os.path.join(path, "%s%s%s" % (root, file_hash, ext))
        unparsed_name = list(parsed_name)
        unparsed_name[2] = hashed_name
        # Special casing for a @font-face hack, like url(myfont.eot?#iefix")
        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
        if "?#" in name and not unparsed_name[3]:
            unparsed_name[2] += "?"
        return urlunsplit(unparsed_name)

    def _url(self, hashed_name_func, name, force=False, hashed_files=None):
        """
        Return the non-hashed URL in DEBUG mode.
        """
        if settings.DEBUG and not force:
            hashed_name, fragment = name, ""
        else:
            clean_name, fragment = urldefrag(name)
            if urlsplit(clean_name).path.endswith("/"):  # don't hash paths
                hashed_name = name
            else:
                args = (clean_name,)
                if hashed_files is not None:
                    args += (hashed_files,)
                hashed_name = hashed_name_func(*args)

        final_url = super().url(hashed_name)

        # Special casing for a @font-face hack, like url(myfont.eot?#iefix")
        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
        query_fragment = "?#" in name  # [sic!]
        if fragment or query_fragment:
            urlparts = list(urlsplit(final_url))
            if fragment and not urlparts[4]:
                urlparts[4] = fragment
            if query_fragment and not urlparts[3]:
                urlparts[2] += "?"
            final_url = urlunsplit(urlparts)

        return unquote(final_url)

    def url(self, name, force=False):
        """
        Return the non-hashed URL in DEBUG mode.
        """
        return self._url(self.stored_name, name, force)

    def url_converter(self, name, hashed_files, template=None):
        """
        Return the custom URL converter for the given file name.
        """
        if template is None:
            template = self.default_template

        def converter(matchobj):
            """
            Convert the matched URL to a normalized and hashed URL.

            This requires figuring out which files the matched URL resolves
            to and calling the url() method of the storage.
            """
            matches = matchobj.groupdict()
            matched = matches["matched"]
            url = matches["url"]

            # Ignore absolute/protocol-relative and data-uri URLs.
            if re.match(r"^[a-z]+:", url):
                return matched

            # Ignore absolute URLs that don't point to a static file (dynamic
            # CSS / JS?). Note that STATIC_URL cannot be empty.
            if url.startswith("/") and not url.startswith(settings.STATIC_URL):
                return matched

            # Strip off the fragment so a path-like fragment won't interfere.
            url_path, fragment = urldefrag(url)

            # Ignore URLs without a path
            if not url_path:
                return matched

            if url_path.startswith("/"):
                # Otherwise the condition above would have returned prematurely.
                assert url_path.startswith(settings.STATIC_URL)
                target_name = url_path[len(settings.STATIC_URL) :]
            else:
                # We're using the posixpath module to mix paths and URLs conveniently.
                source_name = name if os.sep == "/" else name.replace(os.sep, "/")
                target_name = posixpath.join(posixpath.dirname(source_name), url_path)

            # Determine the hashed name of the target file with the storage backend.
            hashed_url = self._url(
                self._stored_name,
                unquote(target_name),
                force=True,
                hashed_files=hashed_files,
            )

            # Keep the original directory part of the URL; only the basename
            # is swapped for the hashed one.
            transformed_url = "/".join(
                url_path.split("/")[:-1] + hashed_url.split("/")[-1:]
            )

            # Restore the fragment that was stripped off earlier.
            if fragment:
                transformed_url += ("?#" if "?#" in url else "#") + fragment

            # Return the hashed version to the file
            matches["url"] = unquote(transformed_url)
            return template % matches

        return converter

    def post_process(self, paths, dry_run=False, **options):
        """
        Post process the given dictionary of files (called from collectstatic).

        Processing is actually two separate operations:

        1. renaming files to include a hash of their content for cache-busting,
           and copying those files to the target storage.
        2. adjusting files which contain references to other files so they
           refer to the cache-busting filenames.

        If either of these are performed on a file, then that file is considered
        post-processed.
        """
        # don't even dare to process the files if we're in dry run mode
        if dry_run:
            return

        # where to store the new paths
        hashed_files = {}

        # build a list of adjustable files
        adjustable_paths = [
            path for path in paths if matches_patterns(path, self._patterns)
        ]

        # Adjustable files to yield at end, keyed by the original path.
        processed_adjustable_paths = {}

        # Do a single pass first. Post-process all files once, yielding not
        # adjustable files and exceptions, and collecting adjustable files.
        for name, hashed_name, processed, _ in self._post_process(
            paths, adjustable_paths, hashed_files
        ):
            if name not in adjustable_paths or isinstance(processed, Exception):
                yield name, hashed_name, processed
            else:
                processed_adjustable_paths[name] = (name, hashed_name, processed)

        paths = {path: paths[path] for path in adjustable_paths}
        substitutions = False

        # Re-run substitution until hashes stop changing or the pass limit
        # is hit (cross-references can cascade hash changes).
        for i in range(self.max_post_process_passes):
            substitutions = False
            for name, hashed_name, processed, subst in self._post_process(
                paths, adjustable_paths, hashed_files
            ):
                # Overwrite since hashed_name may be newer.
                processed_adjustable_paths[name] = (name, hashed_name, processed)
                substitutions = substitutions or subst

            if not substitutions:
                break

        if substitutions:
            yield "All", None, RuntimeError("Max post-process passes exceeded.")

        # Store the processed paths
        self.hashed_files.update(hashed_files)

        # Yield adjustable files with final, hashed name.
        yield from processed_adjustable_paths.values()

    def _post_process(self, paths, adjustable_paths, hashed_files):
        """Run one processing pass, yielding (name, hashed_name, processed, subst)."""

        # Sort the files by directory level
        def path_level(name):
            return len(name.split(os.sep))

        for name in sorted(paths, key=path_level, reverse=True):
            substitutions = True
            # use the original, local file, not the copied-but-unprocessed
            # file, which might be somewhere far away, like S3
            storage, path = paths[name]
            with storage.open(path) as original_file:
                cleaned_name = self.clean_name(name)
                hash_key = self.hash_key(cleaned_name)

                # generate the hash with the original content, even for
                # adjustable files.
                if hash_key not in hashed_files:
                    hashed_name = self.hashed_name(name, original_file)
                else:
                    hashed_name = hashed_files[hash_key]

                # then get the original's file content..
                if hasattr(original_file, "seek"):
                    original_file.seek(0)

                hashed_file_exists = self.exists(hashed_name)
                processed = False

                # ..to apply each replacement pattern to the content
                if name in adjustable_paths:
                    old_hashed_name = hashed_name
                    content = original_file.read().decode("utf-8")
                    for extension, patterns in self._patterns.items():
                        if matches_patterns(path, (extension,)):
                            for pattern, template in patterns:
                                converter = self.url_converter(
                                    name, hashed_files, template
                                )
                                try:
                                    content = pattern.sub(converter, content)
                                except ValueError as exc:
                                    yield name, None, exc, False
                    if hashed_file_exists:
                        self.delete(hashed_name)
                    # then save the processed result
                    content_file = ContentFile(content.encode())
                    if self.keep_intermediate_files:
                        # Save intermediate file for reference
                        self._save(hashed_name, content_file)
                    # Re-hash: the substituted content may differ from the
                    # original, so the final name is computed from it.
                    hashed_name = self.hashed_name(name, content_file)

                    if self.exists(hashed_name):
                        self.delete(hashed_name)

                    saved_name = self._save(hashed_name, content_file)
                    hashed_name = self.clean_name(saved_name)
                    # If the file hash stayed the same, this file didn't change
                    if old_hashed_name == hashed_name:
                        substitutions = False
                    processed = True

                if not processed:
                    # or handle the case in which neither processing nor
                    # a change to the original file happened
                    if not hashed_file_exists:
                        processed = True
                        saved_name = self._save(hashed_name, original_file)
                        hashed_name = self.clean_name(saved_name)

                # and then set the cache accordingly
                hashed_files[hash_key] = hashed_name

                yield name, hashed_name, processed, substitutions

    def clean_name(self, name):
        # Normalize Windows path separators to URL-style forward slashes.
        return name.replace("\\", "/")

    def hash_key(self, name):
        # Key used for lookups in hashed_files; identity by default.
        return name

    def _stored_name(self, name, hashed_files):
        """Resolve ``name`` against an explicit in-progress mapping."""
        # Normalize the path to avoid multiple names for the same file like
        # ../foo/bar.css and ../foo/../foo/bar.css which normalize to the same
        # path.
        name = posixpath.normpath(name)
        cleaned_name = self.clean_name(name)
        hash_key = self.hash_key(cleaned_name)
        cache_name = hashed_files.get(hash_key)
        if cache_name is None:
            cache_name = self.clean_name(self.hashed_name(name))
        return cache_name

    def stored_name(self, name):
        """Return the hashed name for ``name``, computing and caching on miss."""
        cleaned_name = self.clean_name(name)
        hash_key = self.hash_key(cleaned_name)
        cache_name = self.hashed_files.get(hash_key)
        if cache_name:
            return cache_name
        # No cached name found, recalculate it from the files.
        intermediate_name = name
        for i in range(self.max_post_process_passes + 1):
            cache_name = self.clean_name(
                self.hashed_name(name, content=None, filename=intermediate_name)
            )
            if intermediate_name == cache_name:
                # Store the hashed name if there was a miss.
                self.hashed_files[hash_key] = cache_name
                return cache_name
            else:
                # Move on to the next intermediate file.
                intermediate_name = cache_name
        # If the cache name can't be determined after the max number of passes,
        # the intermediate files on disk may be corrupt; avoid an infinite loop.
        raise ValueError("The name '%s' could not be hashed with %r." % (name, self))
|
||||
|
||||
|
||||
class ManifestFilesMixin(HashedFilesMixin):
    """
    Mixin that persists the original-name -> hashed-name mapping in a JSON
    manifest so URLs can be resolved without re-hashing files at runtime.
    """

    manifest_version = "1.1"  # the manifest format standard
    manifest_name = "staticfiles.json"
    # When True, a missing manifest entry raises ValueError instead of
    # falling back to hashing the file on the fly.
    manifest_strict = True
    keep_intermediate_files = False

    def __init__(self, *args, manifest_storage=None, **kwargs):
        """Load the manifest (if any) from ``manifest_storage`` (default: self)."""
        super().__init__(*args, **kwargs)
        if manifest_storage is None:
            manifest_storage = self
        self.manifest_storage = manifest_storage
        self.hashed_files, self.manifest_hash = self.load_manifest()

    def read_manifest(self):
        """Return the raw manifest text, or None when no manifest exists yet."""
        try:
            with self.manifest_storage.open(self.manifest_name) as manifest:
                return manifest.read().decode()
        except FileNotFoundError:
            return None

    def load_manifest(self):
        """Parse the manifest and return a ``(paths, manifest_hash)`` pair."""
        content = self.read_manifest()
        if content is None:
            return {}, ""
        try:
            stored = json.loads(content)
        except json.JSONDecodeError:
            pass
        else:
            version = stored.get("version")
            if version in ("1.0", "1.1"):
                # Version 1.0 manifests carry no "hash" key.
                return stored.get("paths", {}), stored.get("hash", "")
        # Unparsable JSON or unknown version: refuse to guess.
        raise ValueError(
            "Couldn't load manifest '%s' (version %s)"
            % (self.manifest_name, self.manifest_version)
        )

    def post_process(self, *args, **kwargs):
        """Rebuild the mapping from scratch, then persist it to the manifest."""
        self.hashed_files = {}
        yield from super().post_process(*args, **kwargs)
        if not kwargs.get("dry_run"):
            self.save_manifest()

    def save_manifest(self):
        """Write the current mapping (and its hash) out as JSON."""
        # Hash a sorted view so the manifest hash doesn't depend on dict order.
        self.manifest_hash = self.file_hash(
            None, ContentFile(json.dumps(sorted(self.hashed_files.items())).encode())
        )
        payload = {
            "paths": self.hashed_files,
            "version": self.manifest_version,
            "hash": self.manifest_hash,
        }
        # Delete first: storage backends may otherwise rename on collision.
        if self.manifest_storage.exists(self.manifest_name):
            self.manifest_storage.delete(self.manifest_name)
        contents = json.dumps(payload).encode()
        self.manifest_storage._save(self.manifest_name, ContentFile(contents))

    def stored_name(self, name):
        """Resolve ``name`` to its hashed counterpart via the manifest."""
        parsed_name = urlsplit(unquote(name))
        clean_name = parsed_name.path.strip()
        hash_key = self.hash_key(clean_name)
        cache_name = self.hashed_files.get(hash_key)
        if cache_name is None:
            if self.manifest_strict:
                raise ValueError(
                    "Missing staticfiles manifest entry for '%s'" % clean_name
                )
            cache_name = self.clean_name(self.hashed_name(name))
        unparsed_name = list(parsed_name)
        unparsed_name[2] = cache_name
        # Special casing for a @font-face hack, like url(myfont.eot?#iefix")
        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
        if "?#" in name and not unparsed_name[3]:
            unparsed_name[2] += "?"
        return urlunsplit(unparsed_name)
|
||||
|
||||
|
||||
class ManifestStaticFilesStorage(ManifestFilesMixin, StaticFilesStorage):
    """
    A static file system storage backend which also saves
    hashed copies of the files it saves.
    """
|
||||
|
||||
|
||||
class ConfiguredStorage(LazyObject):
    # Lazy proxy: the concrete backend under STATICFILES_STORAGE_ALIAS is
    # only looked up on first attribute access, deferring settings access.
    def _setup(self):
        self._wrapped = storages[STATICFILES_STORAGE_ALIAS]


# Module-level singleton used throughout the staticfiles app.
staticfiles_storage = ConfiguredStorage()
|
@ -0,0 +1,13 @@
|
||||
from django.contrib.staticfiles.handlers import StaticFilesHandler
|
||||
from django.test import LiveServerTestCase
|
||||
|
||||
|
||||
class StaticLiveServerTestCase(LiveServerTestCase):
    """
    Extend django.test.LiveServerTestCase to transparently overlay at test
    execution-time the assets provided by the staticfiles app finders. This
    means you don't need to run collectstatic before or as a part of your tests
    setup.
    """

    # Swap in the staticfiles handler so assets are served from the finders.
    static_handler = StaticFilesHandler
|
@ -0,0 +1,19 @@
|
||||
from django.conf import settings
|
||||
from django.conf.urls.static import static
|
||||
from django.contrib.staticfiles.views import serve
|
||||
|
||||
urlpatterns = []


def staticfiles_urlpatterns(prefix=None):
    """
    Helper function to return a URL pattern for serving static files.
    """
    # Default to the configured STATIC_URL when no prefix is supplied.
    if prefix is None:
        prefix = settings.STATIC_URL
    return static(prefix, view=serve)


# Only append if urlpatterns are empty
if settings.DEBUG and not urlpatterns:
    urlpatterns += staticfiles_urlpatterns()
|
@ -0,0 +1,71 @@
|
||||
import fnmatch
|
||||
import os
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
|
||||
|
||||
def matches_patterns(path, patterns):
    """
    Return True if ``path`` matches at least one of the glob-style patterns
    in ``patterns`` (case-sensitively), False otherwise.
    """
    # fnmatchcase() is used (not fnmatch()) so matching is case-sensitive
    # and platform-independent.
    return any(fnmatch.fnmatchcase(path, pattern) for pattern in patterns)
|
||||
|
||||
|
||||
def get_files(storage, ignore_patterns=None, location=""):
    """
    Recursively walk the storage directories yielding the paths
    of all files that should be copied.
    """
    patterns = ignore_patterns or []
    directories, files = storage.listdir(location)
    for filename in files:
        # First filter on the basename alone.
        if matches_patterns(filename, patterns):
            continue
        full_path = os.path.join(location, filename) if location else filename
        # Then filter on the full relative path.
        if matches_patterns(full_path, patterns):
            continue
        yield full_path
    for directory in directories:
        if matches_patterns(directory, patterns):
            continue
        subdir = os.path.join(location, directory) if location else directory
        yield from get_files(storage, patterns, subdir)
|
||||
|
||||
|
||||
def check_settings(base_url=None):
    """
    Validate the staticfiles-related settings, raising ImproperlyConfigured
    on any inconsistency. ``base_url`` defaults to ``STATIC_URL``.
    """
    base_url = settings.STATIC_URL if base_url is None else base_url
    if not base_url:
        raise ImproperlyConfigured(
            "You're using the staticfiles app "
            "without having set the required STATIC_URL setting."
        )
    if base_url == settings.MEDIA_URL:
        raise ImproperlyConfigured(
            "The MEDIA_URL and STATIC_URL settings must have different values"
        )
    # In DEBUG, runserver would shadow media URLs nested under STATIC_URL.
    media_nested_in_static = (
        settings.DEBUG
        and settings.MEDIA_URL
        and settings.STATIC_URL
        and settings.MEDIA_URL.startswith(settings.STATIC_URL)
    )
    if media_nested_in_static:
        raise ImproperlyConfigured(
            "runserver can't serve media if MEDIA_URL is within STATIC_URL."
        )
    if (
        settings.MEDIA_ROOT
        and settings.STATIC_ROOT
        and settings.MEDIA_ROOT == settings.STATIC_ROOT
    ):
        raise ImproperlyConfigured(
            "The MEDIA_ROOT and STATIC_ROOT settings must have different values"
        )
|
@ -0,0 +1,39 @@
|
||||
"""
|
||||
Views and functions for serving static files. These are only to be used during
|
||||
development, and SHOULD NOT be used in a production setting.
|
||||
|
||||
"""
|
||||
import os
|
||||
import posixpath
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.staticfiles import finders
|
||||
from django.http import Http404
|
||||
from django.views import static
|
||||
|
||||
|
||||
def serve(request, path, insecure=False, **kwargs):
    """
    Serve static files below a given point in the directory structure or
    from locations inferred from the staticfiles finders.

    To use, put a URL pattern such as::

        from django.contrib.staticfiles import views

        path('<path:path>', views.serve)

    in your URLconf.

    It uses the django.views.static.serve() view to serve the found files.
    """
    # Refuse to serve outside DEBUG unless explicitly overridden.
    if not (settings.DEBUG or insecure):
        raise Http404
    normalized_path = posixpath.normpath(path).lstrip("/")
    absolute_path = finders.find(normalized_path)
    if not absolute_path:
        if path == "" or path.endswith("/"):
            raise Http404("Directory indexes are not allowed here.")
        raise Http404("'%s' could not be found" % path)
    document_root, filename = os.path.split(absolute_path)
    return static.serve(request, filename, document_root=document_root, **kwargs)
|
Reference in New Issue
Block a user