author     Mathieu Pillard <diox@users.noreply.github.com>  2022-10-28 19:01:54 +0200
committer  GitHub <noreply@github.com>  2022-10-28 19:01:54 +0200
commit     c9b75865c79b8008765b417147a9a6012e50de1c (patch)
tree       d2962abaff4df318c6ef92639c571d4146354029
parent     f99346cf86be43f89e8f42ddf50dc6a01bcf48f1 (diff)
download   django-compressor-c9b75865c79b8008765b417147a9a6012e50de1c.tar.gz
Reformat with black (#1151)
* Reformat with black
* Update flake8 ignores
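Note: the updated ignore list keeps only the checks that clash with black's output — E203 (whitespace before ':') and W503 (line break before a binary operator) contradict black's formatting, and E501 (line too long) is dropped because black wraps lines to its own 88-character limit. As an illustration only (the project itself passes these flags on the flake8 command line in the Makefile, see the first hunk below), a hypothetical setup.cfg equivalent would be:

    [flake8]
    # E203 and W503 conflict with black's formatting; E501 is left to black's 88-column wrapping.
    ignore = E203, E501, W503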
-rw-r--r--  Makefile  2
-rw-r--r--  compressor/base.py  171
-rw-r--r--  compressor/cache.py  36
-rw-r--r--  compressor/conf.py  82
-rw-r--r--  compressor/contrib/jinja2ext.py  46
-rw-r--r--  compressor/contrib/sekizai.py  43
-rw-r--r--  compressor/css.py  31
-rw-r--r--  compressor/exceptions.py  8
-rw-r--r--  compressor/filters/__init__.py  9
-rw-r--r--  compressor/filters/base.py  75
-rw-r--r--  compressor/filters/css_default.py  62
-rw-r--r--  compressor/filters/cssmin/__init__.py  5
-rw-r--r--  compressor/filters/datauri.py  21
-rw-r--r--  compressor/filters/jsmin/__init__.py  6
-rw-r--r--  compressor/filters/template.py  1
-rw-r--r--  compressor/filters/yuglify.py  6
-rw-r--r--  compressor/filters/yui.py  8
-rw-r--r--  compressor/finders.py  1
-rw-r--r--  compressor/js.py  32
-rw-r--r--  compressor/management/commands/compress.py  220
-rw-r--r--  compressor/management/commands/mtime_cache.py  74
-rw-r--r--  compressor/offline/django.py  36
-rw-r--r--  compressor/offline/jinja2.py  27
-rw-r--r--  compressor/parser/__init__.py  2
-rw-r--r--  compressor/parser/base.py  1
-rw-r--r--  compressor/parser/beautifulsoup.py  10
-rw-r--r--  compressor/parser/default_htmlparser.py  60
-rw-r--r--  compressor/parser/html5lib.py  17
-rw-r--r--  compressor/parser/lxml.py  13
-rw-r--r--  compressor/storage.py  23
-rw-r--r--  compressor/templatetags/compress.py  71
-rw-r--r--  compressor/test_settings.py  81
-rw-r--r--  compressor/tests/precompiler.py  30
-rw-r--r--  compressor/tests/test_base.py  355
-rw-r--r--  compressor/tests/test_conf.py  23
-rw-r--r--  compressor/tests/test_filters.py  449
-rw-r--r--  compressor/tests/test_finder.py  1
-rw-r--r--  compressor/tests/test_jinja2ext.py  142
-rw-r--r--  compressor/tests/test_mtime_cache.py  25
-rw-r--r--  compressor/tests/test_offline.py  634
-rw-r--r--  compressor/tests/test_parsers.py  88
-rw-r--r--  compressor/tests/test_sekizai.py  27
-rw-r--r--  compressor/tests/test_signals.py  33
-rw-r--r--  compressor/tests/test_storages.py  52
-rw-r--r--  compressor/tests/test_templatetags.py  75
-rw-r--r--  compressor/tests/test_utils.py  32
-rw-r--r--  compressor/utils/__init__.py  22
-rw-r--r--  compressor/utils/staticfiles.py  6
48 files changed, 1946 insertions(+), 1328 deletions(-)
diff --git a/Makefile b/Makefile
index 9ea1fce..b24405f 100644
--- a/Makefile
+++ b/Makefile
@@ -4,7 +4,7 @@ testenv:
pip install Django
flake8:
- flake8 compressor --ignore=E501,E128,E701,E261,E301,E126,E127,E131,E402,W503
+ flake8 compressor --ignore=E203,E501,W503
runtests:
coverage run --branch --source=compressor `which django-admin` test --settings=compressor.test_settings compressor
diff --git a/compressor/base.py b/compressor/base.py
index 12e10ba..0321840 100644
--- a/compressor/base.py
+++ b/compressor/base.py
@@ -10,16 +10,19 @@ from django.utils.functional import cached_property
from compressor.cache import get_hexdigest, get_mtime
from compressor.conf import settings
-from compressor.exceptions import (CompressorError, UncompressableFileError,
- FilterDoesNotExist)
+from compressor.exceptions import (
+ CompressorError,
+ UncompressableFileError,
+ FilterDoesNotExist,
+)
from compressor.filters import CachedCompilerFilter
from compressor.storage import compressor_file_storage
from compressor.signals import post_compress
from compressor.utils import get_class, get_mod_func, staticfiles
# Some constants for nicer handling.
-SOURCE_HUNK, SOURCE_FILE = 'inline', 'file'
-METHOD_INPUT, METHOD_OUTPUT = 'input', 'output'
+SOURCE_HUNK, SOURCE_FILE = "inline", "file"
+METHOD_INPUT, METHOD_OUTPUT = "input", "output"
class Compressor:
@@ -30,8 +33,18 @@ class Compressor:
output_mimetypes = {}
- def __init__(self, resource_kind, content=None, output_prefix=None,
- context=None, filters=None, log=None, verbosity=1, *args, **kwargs):
+ def __init__(
+ self,
+ resource_kind,
+ content=None,
+ output_prefix=None,
+ context=None,
+ filters=None,
+ log=None,
+ verbosity=1,
+ *args,
+ **kwargs
+ ):
if filters is None:
self.filters = settings.COMPRESS_FILTERS[resource_kind]
else:
@@ -41,7 +54,7 @@ class Compressor:
else:
self.output_prefix = output_prefix
self.content = content or "" # rendered contents of {% compress %} tag
- self.output_dir = settings.COMPRESS_OUTPUT_DIR.strip('/')
+ self.output_dir = settings.COMPRESS_OUTPUT_DIR.strip("/")
self.charset = settings.DEFAULT_CHARSET
self.split_content = []
self.context = context or {}
@@ -58,13 +71,15 @@ class Compressor:
content=self.content,
context=self.context,
output_prefix=self.output_prefix,
- filters=self.filters)
+ filters=self.filters,
+ )
keywords.update(kwargs)
return self.__class__(self.resource_kind, **keywords)
@cached_property
def storage(self):
from compressor.storage import default_storage
+
return default_storage
def split_contents(self):
@@ -103,9 +118,11 @@ class Compressor:
base_url = str(base_url)
if not url.startswith(base_url):
- raise UncompressableFileError("'%s' isn't accessible via "
- "COMPRESS_URL ('%s') and can't be "
- "compressed" % (url, base_url))
+ raise UncompressableFileError(
+ "'%s' isn't accessible via "
+ "COMPRESS_URL ('%s') and can't be "
+ "compressed" % (url, base_url)
+ )
basename = url.replace(base_url, "", 1)
# drop the querystring, which is used for non-compressed cache-busting.
return basename.split("?", 1)[0]
@@ -127,7 +144,7 @@ class Compressor:
filename = os.path.split(basename)[1]
parts.append(os.path.splitext(filename)[0])
parts.extend([get_hexdigest(content, 12), self.resource_kind])
- return os.path.join(self.output_dir, self.output_prefix, '.'.join(parts))
+ return os.path.join(self.output_dir, self.output_prefix, ".".join(parts))
def get_filename(self, basename):
"""
@@ -144,50 +161,62 @@ class Compressor:
# call path first so remote storages don't make it to exists,
# which would cause network I/O
if self.log and self.verbosity >= 2:
- self.log.write('Looking for \'{}\' in storage\n'.format(basename))
+ self.log.write("Looking for '{}' in storage\n".format(basename))
filename = self.storage.path(basename)
if not self.storage.exists(basename):
filename = None
except NotImplementedError:
# remote storages don't implement path, access the file locally
if self.log and self.verbosity >= 2:
- self.log.write('Remote storages don\'t implement path, looking for the file locally\n')
+ self.log.write(
+ "Remote storages don't implement path, looking for the file locally\n"
+ )
if compressor_file_storage.exists(basename):
filename = compressor_file_storage.path(basename)
# secondly try to find it with staticfiles
if not filename and self.finders:
if self.log and self.verbosity >= 2:
if not settings.DEBUG:
- self.log.write('\'{}\' was not found in storage, using static finders\n'.format(basename))
+ self.log.write(
+ "'{}' was not found in storage, using static finders\n".format(
+ basename
+ )
+ )
else:
- self.log.write('Using static finders for \'{}\'\n'.format(basename))
+ self.log.write("Using static finders for '{}'\n".format(basename))
filename = self.finders.find(url2pathname(basename))
if filename:
return filename
# or just raise an exception as the last resort
raise UncompressableFileError(
- "'%s' could not be found in the COMPRESS_ROOT '%s'%s" %
- (basename, settings.COMPRESS_ROOT,
- self.finders and " or with staticfiles." or "."))
+ "'%s' could not be found in the COMPRESS_ROOT '%s'%s"
+ % (
+ basename,
+ settings.COMPRESS_ROOT,
+ self.finders and " or with staticfiles." or ".",
+ )
+ )
def get_filecontent(self, filename, charset):
"""
Reads file contents using given `charset` and returns it as text.
"""
- if charset == 'utf-8':
+ if charset == "utf-8":
# Removes BOM
- charset = 'utf-8-sig'
- with codecs.open(filename, 'r', charset) as fd:
+ charset = "utf-8-sig"
+ with codecs.open(filename, "r", charset) as fd:
try:
return fd.read()
except IOError as e:
- raise UncompressableFileError("IOError while processing "
- "'%s': %s" % (filename, e))
+ raise UncompressableFileError(
+ "IOError while processing " "'%s': %s" % (filename, e)
+ )
except UnicodeDecodeError as e:
- raise UncompressableFileError("UnicodeDecodeError while "
- "processing '%s' with "
- "charset %s: %s" %
- (filename, charset, e))
+ raise UncompressableFileError(
+ "UnicodeDecodeError while "
+ "processing '%s' with "
+ "charset %s: %s" % (filename, charset, e)
+ )
@cached_property
def parser(self):
@@ -199,14 +228,17 @@ class Compressor:
@cached_property
def mtimes(self):
- return [str(get_mtime(value))
- for kind, value, basename, elem in self.split_contents()
- if kind == SOURCE_FILE]
+ return [
+ str(get_mtime(value))
+ for kind, value, basename, elem in self.split_contents()
+ if kind == SOURCE_FILE
+ ]
@cached_property
def cachekey(self):
- return get_hexdigest(''.join(
- [self.content] + self.mtimes).encode(self.charset), 12)
+ return get_hexdigest(
+ "".join([self.content] + self.mtimes).encode(self.charset), 12
+ )
def hunks(self, forced=False):
"""
@@ -222,11 +254,11 @@ class Compressor:
attribs = self.parser.elem_attribs(elem)
charset = attribs.get("charset", self.charset)
options = {
- 'method': METHOD_INPUT,
- 'elem': elem,
- 'kind': kind,
- 'basename': basename,
- 'charset': charset,
+ "method": METHOD_INPUT,
+ "elem": elem,
+ "kind": kind,
+ "basename": basename,
+ "charset": charset,
}
if kind == SOURCE_FILE:
@@ -242,8 +274,7 @@ class Compressor:
for filter_cls in self.cached_filters:
if filter_cls.run_with_compression_disabled:
value = self.filter(value, [filter_cls], **options)
- yield self.handle_output(kind, value, forced=True,
- basename=basename)
+ yield self.handle_output(kind, value, forced=True, basename=basename)
else:
yield self.parser.elem_str(elem)
@@ -264,8 +295,9 @@ class Compressor:
content.append(hunk)
return content
- def precompile(self, content, kind=None, elem=None, filename=None,
- charset=None, **kwargs):
+ def precompile(
+ self, content, kind=None, elem=None, filename=None, charset=None, **kwargs
+ ):
"""
Processes file using a pre compiler.
@@ -282,31 +314,43 @@ class Compressor:
if filter_or_command is None:
if mimetype in self.output_mimetypes:
return False, content
- raise CompressorError("Couldn't find any precompiler in "
- "COMPRESS_PRECOMPILERS setting for "
- "mimetype '%s'." % mimetype)
+ raise CompressorError(
+ "Couldn't find any precompiler in "
+ "COMPRESS_PRECOMPILERS setting for "
+ "mimetype '%s'." % mimetype
+ )
mod_name, cls_name = get_mod_func(filter_or_command)
try:
mod = import_module(mod_name)
except (ImportError, TypeError):
filter = CachedCompilerFilter(
- content=content, filter_type=self.resource_kind, filename=filename,
- charset=charset, command=filter_or_command, mimetype=mimetype)
+ content=content,
+ filter_type=self.resource_kind,
+ filename=filename,
+ charset=charset,
+ command=filter_or_command,
+ mimetype=mimetype,
+ )
return True, filter.input(**kwargs)
try:
precompiler_class = getattr(mod, cls_name)
except AttributeError:
raise FilterDoesNotExist('Could not find "%s".' % filter_or_command)
filter = precompiler_class(
- content, attrs=attrs, filter_type=self.resource_kind, charset=charset,
- filename=filename)
+ content,
+ attrs=attrs,
+ filter_type=self.resource_kind,
+ charset=charset,
+ filename=filename,
+ )
return True, filter.input(**kwargs)
def filter(self, content, filters, method, **kwargs):
for filter_cls in filters:
filter_func = getattr(
- filter_cls(content, filter_type=self.resource_kind), method)
+ filter_cls(content, filter_type=self.resource_kind), method
+ )
try:
if callable(filter_func):
content = filter_func(**kwargs)
@@ -314,16 +358,16 @@ class Compressor:
pass
return content
- def output(self, mode='file', forced=False, basename=None):
+ def output(self, mode="file", forced=False, basename=None):
"""
The general output method, override in subclass if you need to do
any custom modification. Calls other mode specific methods or simply
returns the content directly.
"""
- output = '\n'.join(self.filter_input(forced))
+ output = "\n".join(self.filter_input(forced))
if not output:
- return ''
+ return ""
if settings.COMPRESS_ENABLED or forced:
filtered_output = self.filter_output(output)
@@ -337,8 +381,7 @@ class Compressor:
if callable(output_func):
return output_func(mode, content, forced, basename)
# Total failure, raise a general exception
- raise CompressorError(
- "Couldn't find output method for mode '%s'" % mode)
+ raise CompressorError("Couldn't find output method for mode '%s'" % mode)
def output_file(self, mode, content, forced=False, basename=None):
"""
@@ -372,19 +415,23 @@ class Compressor:
"""
# Just in case someone renders the compressor outside
# the usual template rendering cycle
- if 'compressed' not in self.context:
- self.context['compressed'] = {}
+ if "compressed" not in self.context:
+ self.context["compressed"] = {}
- self.context['compressed'].update(context or {})
- self.context['compressed'].update(self.extra_context)
+ self.context["compressed"].update(context or {})
+ self.context["compressed"].update(self.extra_context)
- if hasattr(self.context, 'flatten'):
+ if hasattr(self.context, "flatten"):
# Passing Contexts to Template.render is deprecated since Django 1.8.
final_context = self.context.flatten()
else:
final_context = self.context
- post_compress.send(sender=self.__class__, type=self.resource_kind,
- mode=mode, context=final_context)
+ post_compress.send(
+ sender=self.__class__,
+ type=self.resource_kind,
+ mode=mode,
+ context=final_context,
+ )
template_name = self.get_template_name(mode)
return render_to_string(template_name, context=final_context)
diff --git a/compressor/cache.py b/compressor/cache.py
index 6152357..c5dabbf 100644
--- a/compressor/cache.py
+++ b/compressor/cache.py
@@ -25,23 +25,24 @@ def get_hexdigest(plaintext, length=None):
def simple_cachekey(key):
- return 'django_compressor.%s' % force_str(key)
+ return "django_compressor.%s" % force_str(key)
def socket_cachekey(key):
- return 'django_compressor.%s.%s' % (socket.gethostname(), force_str(key))
+ return "django_compressor.%s.%s" % (socket.gethostname(), force_str(key))
def get_cachekey(*args, **kwargs):
global _cachekey_func
if _cachekey_func is None:
try:
- mod_name, func_name = get_mod_func(
- settings.COMPRESS_CACHE_KEY_FUNCTION)
+ mod_name, func_name = get_mod_func(settings.COMPRESS_CACHE_KEY_FUNCTION)
_cachekey_func = getattr(import_module(mod_name), func_name)
except (AttributeError, ImportError, TypeError) as e:
- raise ImportError("Couldn't import cache key function %s: %s" %
- (settings.COMPRESS_CACHE_KEY_FUNCTION, e))
+ raise ImportError(
+ "Couldn't import cache key function %s: %s"
+ % (settings.COMPRESS_CACHE_KEY_FUNCTION, e)
+ )
return _cachekey_func(*args, **kwargs)
@@ -57,7 +58,8 @@ def get_offline_hexdigest(render_template_string):
# a string-alike object to e.g. add ``SCRIPT_NAME`` WSGI param
# as a *path prefix* to the output URL.
# See https://code.djangoproject.com/ticket/25598.
- str(settings.STATIC_URL), ''
+ str(settings.STATIC_URL),
+ "",
)
)
@@ -75,7 +77,7 @@ def get_offline_manifest():
filename = settings.COMPRESS_OFFLINE_MANIFEST
if default_offline_manifest_storage.exists(filename):
with default_offline_manifest_storage.open(filename) as fp:
- _offline_manifest = json.loads(fp.read().decode('utf8'))
+ _offline_manifest = json.loads(fp.read().decode("utf8"))
else:
_offline_manifest = {}
return _offline_manifest
@@ -87,14 +89,15 @@ def flush_offline_manifest():
def write_offline_manifest(manifest):
- content = json.dumps(manifest, indent=2).encode('utf8')
- default_offline_manifest_storage.save(settings.COMPRESS_OFFLINE_MANIFEST, ContentFile(content))
+ content = json.dumps(manifest, indent=2).encode("utf8")
+ default_offline_manifest_storage.save(
+ settings.COMPRESS_OFFLINE_MANIFEST, ContentFile(content)
+ )
flush_offline_manifest()
def get_templatetag_cachekey(compressor, mode, kind):
- return get_cachekey(
- "templatetag.%s.%s.%s" % (compressor.cachekey, mode, kind))
+ return get_cachekey("templatetag.%s.%s.%s" % (compressor.cachekey, mode, kind))
def get_mtime(filename):
@@ -124,12 +127,14 @@ def get_hashed_content(filename, length=12):
return None
# should we make sure that file is utf-8 encoded?
- with open(filename, 'rb') as file:
+ with open(filename, "rb") as file:
return get_hexdigest(file.read(), length)
def get_precompiler_cachekey(command, contents):
- return hashlib.sha1(smart_bytes('precompiler.%s.%s' % (command, contents))).hexdigest()
+ return hashlib.sha1(
+ smart_bytes("precompiler.%s.%s" % (command, contents))
+ ).hexdigest()
def cache_get(key):
@@ -140,8 +145,7 @@ def cache_get(key):
if (time.time() > refresh_time) and not refreshed:
# Store the stale value while the cache
# revalidates for another MINT_DELAY seconds.
- cache_set(key, val, refreshed=True,
- timeout=settings.COMPRESS_MINT_DELAY)
+ cache_set(key, val, refreshed=True, timeout=settings.COMPRESS_MINT_DELAY)
return None
return val
diff --git a/compressor/conf.py b/compressor/conf.py
index 7ebdbae..fff71b2 100644
--- a/compressor/conf.py
+++ b/compressor/conf.py
@@ -14,27 +14,27 @@ class CompressorConf(AppConf):
# GET variable that disables compressor e.g. "nocompress"
DEBUG_TOGGLE = None
# the backend to use when parsing the JavaScript or Stylesheet files
- PARSER = 'compressor.parser.AutoSelectParser'
- OUTPUT_DIR = 'CACHE'
- STORAGE = 'compressor.storage.CompressorFileStorage'
+ PARSER = "compressor.parser.AutoSelectParser"
+ OUTPUT_DIR = "CACHE"
+ STORAGE = "compressor.storage.CompressorFileStorage"
COMPRESSORS = dict(
- css='compressor.css.CssCompressor',
- js='compressor.js.JsCompressor',
+ css="compressor.css.CssCompressor",
+ js="compressor.js.JsCompressor",
)
URL = None
ROOT = None
FILTERS = {
- 'css': [
- 'compressor.filters.css_default.CssAbsoluteFilter',
- 'compressor.filters.cssmin.rCSSMinFilter'
+ "css": [
+ "compressor.filters.css_default.CssAbsoluteFilter",
+ "compressor.filters.cssmin.rCSSMinFilter",
],
- 'js': ['compressor.filters.jsmin.rJSMinFilter']
+ "js": ["compressor.filters.jsmin.rJSMinFilter"],
}
- CSS_HASHING_METHOD = 'mtime'
+ CSS_HASHING_METHOD = "mtime"
PRECOMPILERS = (
# ('text/coffeescript', 'coffee --compile --stdio'),
@@ -44,22 +44,22 @@ class CompressorConf(AppConf):
# ('text/x-scss', 'sass --scss {infile} {outfile}'),
)
CACHEABLE_PRECOMPILERS = ()
- CLOSURE_COMPILER_BINARY = 'java -jar compiler.jar'
- CLOSURE_COMPILER_ARGUMENTS = ''
- YUI_BINARY = 'java -jar yuicompressor.jar'
- YUI_CSS_ARGUMENTS = ''
- YUI_JS_ARGUMENTS = ''
- YUGLIFY_BINARY = 'yuglify'
- YUGLIFY_CSS_ARGUMENTS = '--terminal'
- YUGLIFY_JS_ARGUMENTS = '--terminal'
- CLEAN_CSS_BINARY = 'cleancss'
- CLEAN_CSS_ARGUMENTS = ''
+ CLOSURE_COMPILER_BINARY = "java -jar compiler.jar"
+ CLOSURE_COMPILER_ARGUMENTS = ""
+ YUI_BINARY = "java -jar yuicompressor.jar"
+ YUI_CSS_ARGUMENTS = ""
+ YUI_JS_ARGUMENTS = ""
+ YUGLIFY_BINARY = "yuglify"
+ YUGLIFY_CSS_ARGUMENTS = "--terminal"
+ YUGLIFY_JS_ARGUMENTS = "--terminal"
+ CLEAN_CSS_BINARY = "cleancss"
+ CLEAN_CSS_ARGUMENTS = ""
DATA_URI_MAX_SIZE = 1024
# the cache backend to use
CACHE_BACKEND = None
# the dotted path to the function that creates the cache key
- CACHE_KEY_FUNCTION = 'compressor.cache.simple_cachekey'
+ CACHE_KEY_FUNCTION = "compressor.cache.simple_cachekey"
# rebuilds the cache every 30 days if nothing has changed.
REBUILD_TIMEOUT = 60 * 60 * 24 * 30 # 30 days
# the upper bound on how long any compression should take to be generated
@@ -74,68 +74,74 @@ class CompressorConf(AppConf):
# The context to be used when compressing the files "offline"
OFFLINE_CONTEXT = {}
# The name of the manifest file (e.g. filename.ext)
- OFFLINE_MANIFEST = 'manifest.json'
- OFFLINE_MANIFEST_STORAGE = 'compressor.storage.OfflineManifestFileStorage'
+ OFFLINE_MANIFEST = "manifest.json"
+ OFFLINE_MANIFEST_STORAGE = "compressor.storage.OfflineManifestFileStorage"
# The Context to be used when TemplateFilter is used
TEMPLATE_FILTER_CONTEXT = {}
# Placeholder to be used instead of settings.COMPRESS_URL during offline compression.
# Affects manifest file contents only.
- URL_PLACEHOLDER = '/__compressor_url_placeholder__/'
+ URL_PLACEHOLDER = "/__compressor_url_placeholder__/"
# Returns the Jinja2 environment to use in offline compression.
def JINJA2_GET_ENVIRONMENT():
- alias = 'jinja2'
+ alias = "jinja2"
try:
from django.template import engines
+
return engines[alias].env
except InvalidTemplateEngineError:
raise InvalidTemplateEngineError(
"Could not find config for '{}' "
"in settings.TEMPLATES. "
"COMPRESS_JINJA2_GET_ENVIRONMENT() may "
- "need to be defined in settings".format(alias))
+ "need to be defined in settings".format(alias)
+ )
except ImportError:
return None
class Meta:
- prefix = 'compress'
+ prefix = "compress"
def configure_root(self, value):
# Uses Django's STATIC_ROOT by default
if value is None:
value = settings.STATIC_ROOT
if value is None:
- raise ImproperlyConfigured('COMPRESS_ROOT defaults to '
- + 'STATIC_ROOT, please define either')
+ raise ImproperlyConfigured(
+ "COMPRESS_ROOT defaults to " + "STATIC_ROOT, please define either"
+ )
return os.path.normcase(os.path.abspath(value))
def configure_url(self, value):
# Uses Django's STATIC_URL by default
if value is None:
value = settings.STATIC_URL
- if not value.endswith('/'):
- raise ImproperlyConfigured("URL settings (e.g. COMPRESS_URL) "
- "must have a trailing slash")
+ if not value.endswith("/"):
+ raise ImproperlyConfigured(
+ "URL settings (e.g. COMPRESS_URL) " "must have a trailing slash"
+ )
return value
def configure_cache_backend(self, value):
if value is None:
- value = 'default'
+ value = "default"
return value
def configure_offline_context(self, value):
if not value:
- value = {'STATIC_URL': settings.STATIC_URL}
+ value = {"STATIC_URL": settings.STATIC_URL}
return value
def configure_template_filter_context(self, value):
if not value:
- value = {'STATIC_URL': settings.STATIC_URL}
+ value = {"STATIC_URL": settings.STATIC_URL}
return value
def configure_precompilers(self, value):
if not isinstance(value, (list, tuple)):
- raise ImproperlyConfigured("The COMPRESS_PRECOMPILERS setting "
- "must be a list or tuple. Check for "
- "missing commas.")
+ raise ImproperlyConfigured(
+ "The COMPRESS_PRECOMPILERS setting "
+ "must be a list or tuple. Check for "
+ "missing commas."
+ )
return value
diff --git a/compressor/contrib/jinja2ext.py b/compressor/contrib/jinja2ext.py
index c519ceb..19a053d 100644
--- a/compressor/contrib/jinja2ext.py
+++ b/compressor/contrib/jinja2ext.py
@@ -15,7 +15,7 @@ def const(node):
class CompressorExtension(compress.CompressorMixin, Extension):
- tags = set(['compress'])
+ tags = set(["compress"])
def parse(self, parser):
# Store the first lineno for the actual function call
@@ -29,44 +29,52 @@ class CompressorExtension(compress.CompressorMixin, Extension):
args.append(kindarg)
else:
raise TemplateSyntaxError(
- 'Compress kind may be one of: %r, got: %r' % (
- self.compressors.keys(), kindarg.value),
- parser.stream.current.lineno)
+ "Compress kind may be one of: %r, got: %r"
+ % (self.compressors.keys(), kindarg.value),
+ parser.stream.current.lineno,
+ )
# For legacy support, allow for a commma but simply ignore it
- parser.stream.skip_if('comma')
+ parser.stream.skip_if("comma")
# Some sane defaults for file output
namearg = nodes.Const(None)
- modearg = nodes.Const('file')
+ modearg = nodes.Const("file")
# If we're not at the "%}" part yet we must have a output mode argument
- if parser.stream.current.type != 'block_end':
+ if parser.stream.current.type != "block_end":
modearg = const(parser.parse_expression())
args.append(modearg)
if modearg.value == compress.OUTPUT_FILE:
# The file mode optionally accepts a name
- if parser.stream.current.type != 'block_end':
+ if parser.stream.current.type != "block_end":
namearg = const(parser.parse_expression())
- elif modearg.value == compress.OUTPUT_INLINE or modearg.value == compress.OUTPUT_PRELOAD:
+ elif (
+ modearg.value == compress.OUTPUT_INLINE
+ or modearg.value == compress.OUTPUT_PRELOAD
+ ):
pass
else:
raise TemplateSyntaxError(
- 'Compress mode may be one of: %r, got %r' % (
- compress.OUTPUT_MODES, modearg.value),
- parser.stream.current.lineno)
+ "Compress mode may be one of: %r, got %r"
+ % (compress.OUTPUT_MODES, modearg.value),
+ parser.stream.current.lineno,
+ )
# Parse everything between the compress and endcompress tags
- body = parser.parse_statements(['name:endcompress'], drop_needle=True)
+ body = parser.parse_statements(["name:endcompress"], drop_needle=True)
# Skip the kind if used in the endblock, by using the kind in the
# endblock the templates are slightly more readable.
- parser.stream.skip_if('name:' + kindarg.value)
+ parser.stream.skip_if("name:" + kindarg.value)
return nodes.CallBlock(
- self.call_method('_compress_normal', [kindarg, modearg, namearg]),
- [], [], body).set_lineno(lineno)
+ self.call_method("_compress_normal", [kindarg, modearg, namearg]),
+ [],
+ [],
+ body,
+ ).set_lineno(lineno)
def _compress_forced(self, kind, mode, name, caller):
return self._compress(kind, mode, name, caller, True)
@@ -77,10 +85,8 @@ class CompressorExtension(compress.CompressorMixin, Extension):
def _compress(self, kind, mode, name, caller, forced):
mode = mode or compress.OUTPUT_FILE
original_content = caller()
- context = {
- 'original_content': original_content
- }
+ context = {"original_content": original_content}
return self.render_compressed(context, kind, mode, name, forced=forced)
def get_original_content(self, context):
- return context['original_content']
+ return context["original_content"]
diff --git a/compressor/contrib/sekizai.py b/compressor/contrib/sekizai.py
index 4100d70..0327cb5 100644
--- a/compressor/contrib/sekizai.py
+++ b/compressor/contrib/sekizai.py
@@ -22,42 +22,47 @@ def compress(context, data, name):
"""
# separate compressible from uncompressable files
parser = get_class(settings.COMPRESS_PARSER)(data)
- js_compressor, css_compressor = Compressor('js'), Compressor('css')
+ js_compressor, css_compressor = Compressor("js"), Compressor("css")
compressable_elements, expanded_elements, deferred_elements = [], [], []
- if name == 'js':
+ if name == "js":
for elem in parser.js_elems():
attribs = parser.elem_attribs(elem)
try:
- if 'src' in attribs:
- js_compressor.get_basename(attribs['src'])
+ if "src" in attribs:
+ js_compressor.get_basename(attribs["src"])
except UncompressableFileError:
- if 'defer' in attribs:
+ if "defer" in attribs:
deferred_elements.append(elem)
else:
expanded_elements.append(elem)
else:
compressable_elements.append(elem)
- elif name == 'css':
+ elif name == "css":
for elem in parser.css_elems():
attribs = parser.elem_attribs(elem)
try:
- if parser.elem_name(elem) == 'link' and attribs['rel'].lower() == 'stylesheet':
- css_compressor.get_basename(attribs['href'])
+ if (
+ parser.elem_name(elem) == "link"
+ and attribs["rel"].lower() == "stylesheet"
+ ):
+ css_compressor.get_basename(attribs["href"])
except UncompressableFileError:
expanded_elements.append(elem)
else:
compressable_elements.append(elem)
# reconcatenate them
- data = ''.join(parser.elem_str(e) for e in expanded_elements)
- expanded_node = CompressorNode(nodelist=TextNode(data), kind=name, mode='file')
- data = ''.join(parser.elem_str(e) for e in compressable_elements)
- compressable_node = CompressorNode(nodelist=TextNode(data), kind=name, mode='file')
- data = ''.join(parser.elem_str(e) for e in deferred_elements)
- deferred_node = CompressorNode(nodelist=TextNode(data), kind=name, mode='file')
+ data = "".join(parser.elem_str(e) for e in expanded_elements)
+ expanded_node = CompressorNode(nodelist=TextNode(data), kind=name, mode="file")
+ data = "".join(parser.elem_str(e) for e in compressable_elements)
+ compressable_node = CompressorNode(nodelist=TextNode(data), kind=name, mode="file")
+ data = "".join(parser.elem_str(e) for e in deferred_elements)
+ deferred_node = CompressorNode(nodelist=TextNode(data), kind=name, mode="file")
- return '\n'.join([
- expanded_node.get_original_content(context=context),
- compressable_node.render(context=context),
- deferred_node.get_original_content(context=context),
- ])
+ return "\n".join(
+ [
+ expanded_node.get_original_content(context=context),
+ compressable_node.render(context=context),
+ deferred_node.get_original_content(context=context),
+ ]
+ )
diff --git a/compressor/css.py b/compressor/css.py
index 69e71d3..baaf07f 100644
--- a/compressor/css.py
+++ b/compressor/css.py
@@ -4,7 +4,7 @@ from compressor.conf import settings
class CssCompressor(Compressor):
- output_mimetypes = {'text/css'}
+ output_mimetypes = {"text/css"}
def split_contents(self):
if self.split_content:
@@ -14,17 +14,23 @@ class CssCompressor(Compressor):
data = None
elem_name = self.parser.elem_name(elem)
elem_attribs = self.parser.elem_attribs(elem)
- if elem_name == 'link' and 'rel' in elem_attribs and elem_attribs['rel'].lower() == 'stylesheet':
- basename = self.get_basename(elem_attribs['href'])
+ if (
+ elem_name == "link"
+ and "rel" in elem_attribs
+ and elem_attribs["rel"].lower() == "stylesheet"
+ ):
+ basename = self.get_basename(elem_attribs["href"])
filename = self.get_filename(basename)
data = (SOURCE_FILE, filename, basename, elem)
- elif elem_name == 'style':
+ elif elem_name == "style":
data = (SOURCE_HUNK, self.parser.elem_content(elem), None, elem)
if data:
self.split_content.append(data)
- media = elem_attribs.get('media', None)
+ media = elem_attribs.get("media", None)
# Append to the previous node if it had the same media type
- append_to_previous = self.media_nodes and self.media_nodes[-1][0] == media
+ append_to_previous = (
+ self.media_nodes and self.media_nodes[-1][0] == media
+ )
# and we are not just precompiling, otherwise create a new node.
if append_to_previous and settings.COMPRESS_ENABLED:
self.media_nodes[-1][1].split_content.append(data)
@@ -35,14 +41,17 @@ class CssCompressor(Compressor):
return self.split_content
def output(self, *args, **kwargs):
- if (settings.COMPRESS_ENABLED or settings.COMPRESS_PRECOMPILERS
- or kwargs.get('forced', False)):
+ if (
+ settings.COMPRESS_ENABLED
+ or settings.COMPRESS_PRECOMPILERS
+ or kwargs.get("forced", False)
+ ):
# Populate self.split_content
self.split_contents()
- if hasattr(self, 'media_nodes'):
+ if hasattr(self, "media_nodes"):
ret = []
for media, subnode in self.media_nodes:
- subnode.extra_context.update({'media': media})
+ subnode.extra_context.update({"media": media})
ret.append(subnode.output(*args, **kwargs))
- return ''.join(ret)
+ return "".join(ret)
return super().output(*args, **kwargs)
diff --git a/compressor/exceptions.py b/compressor/exceptions.py
index c2d7c60..273743a 100644
--- a/compressor/exceptions.py
+++ b/compressor/exceptions.py
@@ -2,6 +2,7 @@ class CompressorError(Exception):
"""
A general error of the compressor
"""
+
pass
@@ -9,6 +10,7 @@ class UncompressableFileError(Exception):
"""
This exception is raised when a file cannot be compressed
"""
+
pass
@@ -16,6 +18,7 @@ class FilterError(Exception):
"""
This exception is raised when a filter fails
"""
+
pass
@@ -23,6 +26,7 @@ class ParserError(Exception):
"""
This exception is raised when the parser fails
"""
+
pass
@@ -30,6 +34,7 @@ class OfflineGenerationError(Exception):
"""
Offline compression generation related exceptions
"""
+
pass
@@ -37,6 +42,7 @@ class FilterDoesNotExist(Exception):
"""
Raised when a filter class cannot be found.
"""
+
pass
@@ -44,6 +50,7 @@ class TemplateDoesNotExist(Exception):
"""
This exception is raised when a template does not exist.
"""
+
pass
@@ -51,4 +58,5 @@ class TemplateSyntaxError(Exception):
"""
This exception is raised when a template syntax error is encountered.
"""
+
pass
diff --git a/compressor/filters/__init__.py b/compressor/filters/__init__.py
index efeee86..0692071 100644
--- a/compressor/filters/__init__.py
+++ b/compressor/filters/__init__.py
@@ -1,3 +1,8 @@
# flake8: noqa
-from compressor.filters.base import (FilterBase, CallbackOutputFilter,
- CompilerFilter, CachedCompilerFilter, FilterError)
+from compressor.filters.base import (
+ FilterBase,
+ CallbackOutputFilter,
+ CompilerFilter,
+ CachedCompilerFilter,
+ FilterError,
+)
diff --git a/compressor/filters/base.py b/compressor/filters/base.py
index d660768..767f0a8 100644
--- a/compressor/filters/base.py
+++ b/compressor/filters/base.py
@@ -9,10 +9,12 @@ if system() != "Windows":
from shlex import quote as shell_quote
else:
from subprocess import list2cmdline
+
def shell_quote(s):
# shlex.quote/pipes.quote is not compatible with Windows
return list2cmdline([s])
+
from django.core.exceptions import ImproperlyConfigured
from django.core.files.temp import NamedTemporaryFile
from django.utils.encoding import smart_str
@@ -41,9 +43,17 @@ class FilterBase:
# This flag allows those filters to do so.
run_with_compression_disabled = False
- def __init__(self, content, attrs=None, filter_type=None, filename=None,
- verbose=0, charset=None, **kwargs):
- self.type = filter_type or getattr(self, 'type', None)
+ def __init__(
+ self,
+ content,
+ attrs=None,
+ filter_type=None,
+ filename=None,
+ verbose=0,
+ charset=None,
+ **kwargs
+ ):
+ self.type = filter_type or getattr(self, "type", None)
self.content = content
self.verbose = verbose or settings.COMPRESS_VERBOSE
self.logger = logger
@@ -68,6 +78,7 @@ class CallbackOutputFilter(FilterBase):
Callback should be a function which takes a string as first argument and
returns a string.
"""
+
callback = None
args = []
kwargs = {}
@@ -77,8 +88,9 @@ class CallbackOutputFilter(FilterBase):
super().__init__(*args, **kwargs)
if self.callback is None:
raise ImproperlyConfigured(
- "The callback filter %s must define a 'callback' attribute." %
- self.__class__.__name__)
+ "The callback filter %s must define a 'callback' attribute."
+ % self.__class__.__name__
+ )
try:
mod_name, func_name = get_mod_func(self.callback)
func = getattr(import_module(mod_name), func_name)
@@ -87,16 +99,20 @@ class CallbackOutputFilter(FilterBase):
if len(self.dependencies) == 1:
warning = "dependency (%s) is" % self.dependencies[0]
else:
- warning = ("dependencies (%s) are" %
- ", ".join([dep for dep in self.dependencies]))
+ warning = "dependencies (%s) are" % ", ".join(
+ [dep for dep in self.dependencies]
+ )
else:
warning = ""
raise ImproperlyConfigured(
"The callback %s couldn't be imported. Make sure the %s "
- "correctly installed." % (self.callback, warning))
+ "correctly installed." % (self.callback, warning)
+ )
except AttributeError as e:
- raise ImproperlyConfigured("An error occurred while importing the "
- "callback filter %s: %s" % (self, e))
+ raise ImproperlyConfigured(
+ "An error occurred while importing the "
+ "callback filter %s: %s" % (self, e)
+ )
else:
self._callback_func = func
@@ -111,11 +127,11 @@ class CompilerFilter(FilterBase):
A filter subclass that is able to filter content via
external commands.
"""
+
command = None
options = ()
default_encoding = (
- settings.FILE_CHARSET if settings.is_overridden('FILE_CHARSET') else
- 'utf-8'
+ settings.FILE_CHARSET if settings.is_overridden("FILE_CHARSET") else "utf-8"
)
def __init__(self, content, command=None, **kwargs):
@@ -149,7 +165,7 @@ class CompilerFilter(FilterBase):
if self.infile is None and "{infile}" in self.command:
# create temporary input file if needed
if self.filename is None:
- self.infile = NamedTemporaryFile(mode='wb')
+ self.infile = NamedTemporaryFile(mode="wb")
self.infile.write(self.content.encode(encoding))
self.infile.flush()
options["infile"] = self.infile.name
@@ -165,7 +181,7 @@ class CompilerFilter(FilterBase):
if "{outfile}" in self.command and "outfile" not in options:
# create temporary output file if needed
ext = self.type and ".%s" % self.type or ""
- self.outfile = NamedTemporaryFile(mode='r+', suffix=ext)
+ self.outfile = NamedTemporaryFile(mode="r+", suffix=ext)
options["outfile"] = self.outfile.name
# Quote infile and outfile for spaces etc.
@@ -177,34 +193,42 @@ class CompilerFilter(FilterBase):
try:
command = self.command.format(**options)
proc = subprocess.Popen(
- command, shell=True, cwd=self.cwd, stdout=self.stdout,
- stdin=self.stdin, stderr=self.stderr)
+ command,
+ shell=True,
+ cwd=self.cwd,
+ stdout=self.stdout,
+ stdin=self.stdin,
+ stderr=self.stderr,
+ )
if self.infile is None:
# if infile is None then send content to process' stdin
- filtered, err = proc.communicate(
- self.content.encode(encoding))
+ filtered, err = proc.communicate(self.content.encode(encoding))
else:
filtered, err = proc.communicate()
filtered, err = filtered.decode(encoding), err.decode(encoding)
except (IOError, OSError) as e:
- raise FilterError('Unable to apply %s (%r): %s' %
- (self.__class__.__name__, self.command, e))
+ raise FilterError(
+ "Unable to apply %s (%r): %s"
+ % (self.__class__.__name__, self.command, e)
+ )
else:
if proc.wait() != 0:
# command failed, raise FilterError exception
if not err:
- err = ('Unable to apply %s (%s)' %
- (self.__class__.__name__, self.command))
+ err = "Unable to apply %s (%s)" % (
+ self.__class__.__name__,
+ self.command,
+ )
if filtered:
- err += '\n%s' % filtered
+ err += "\n%s" % filtered
raise FilterError(err)
if self.verbose:
self.logger.debug(err)
- outfile_path = options.get('outfile')
+ outfile_path = options.get("outfile")
if outfile_path:
- with io.open(outfile_path, 'r', encoding=encoding) as file:
+ with io.open(outfile_path, "r", encoding=encoding) as file:
filtered = file.read()
finally:
if self.infile is not None:
@@ -215,7 +239,6 @@ class CompilerFilter(FilterBase):
class CachedCompilerFilter(CompilerFilter):
-
def __init__(self, mimetype, *args, **kwargs):
self.mimetype = mimetype
super().__init__(*args, **kwargs)
diff --git a/compressor/filters/css_default.py b/compressor/filters/css_default.py
index 9a84fab..41091ac 100644
--- a/compressor/filters/css_default.py
+++ b/compressor/filters/css_default.py
@@ -6,16 +6,19 @@ from compressor.cache import get_hashed_mtime, get_hashed_content
from compressor.conf import settings
from compressor.filters import FilterBase, FilterError
-URL_PATTERN = re.compile(r"""
+URL_PATTERN = re.compile(
+ r"""
url\(
\s* # any amount of whitespace
([\'"]?) # optional quote
(.*?) # any amount of anything, non-greedily (this is the actual url)
\1 # matching quote (or nothing if there was none)
\s* # any amount of whitespace
- \)""", re.VERBOSE)
+ \)""",
+ re.VERBOSE,
+)
SRC_PATTERN = re.compile(r'src=([\'"])(.*?)\1')
-SCHEMES = ('http://', 'https://', '/')
+SCHEMES = ("http://", "https://", "/")
class CssAbsoluteFilter(FilterBase):
@@ -25,25 +28,26 @@ class CssAbsoluteFilter(FilterBase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.root = settings.COMPRESS_ROOT
- self.url = settings.COMPRESS_URL.rstrip('/')
+ self.url = settings.COMPRESS_URL.rstrip("/")
self.url_path = self.url
self.has_scheme = False
def input(self, filename=None, basename=None, **kwargs):
if not filename:
return self.content
- self.path = basename.replace(os.sep, '/')
- self.path = self.path.lstrip('/')
- if self.url.startswith(('http://', 'https://')):
+ self.path = basename.replace(os.sep, "/")
+ self.path = self.path.lstrip("/")
+ if self.url.startswith(("http://", "https://")):
self.has_scheme = True
- parts = self.url.split('/')
- self.url = '/'.join(parts[2:])
- self.url_path = '/%s' % '/'.join(parts[3:])
- self.protocol = '%s/' % '/'.join(parts[:2])
+ parts = self.url.split("/")
+ self.url = "/".join(parts[2:])
+ self.url_path = "/%s" % "/".join(parts[3:])
+ self.protocol = "%s/" % "/".join(parts[:2])
self.host = parts[2]
- self.directory_name = '/'.join((self.url, os.path.dirname(self.path)))
- return SRC_PATTERN.sub(self.src_converter,
- URL_PATTERN.sub(self.url_converter, self.content))
+ self.directory_name = "/".join((self.url, os.path.dirname(self.path)))
+ return SRC_PATTERN.sub(
+ self.src_converter, URL_PATTERN.sub(self.url_converter, self.content)
+ )
def guess_filename(self, url):
local_path = url
@@ -60,7 +64,7 @@ class CssAbsoluteFilter(FilterBase):
if local_path.startswith(self.url_path):
local_path = local_path.replace(self.url_path, "", 1)
# Re-build the local full path by adding root
- filename = os.path.join(self.root, local_path.lstrip('/'))
+ filename = os.path.join(self.root, local_path.lstrip("/"))
return os.path.exists(filename) and filename
def add_suffix(self, url):
@@ -78,9 +82,10 @@ class CssAbsoluteFilter(FilterBase):
elif settings.COMPRESS_CSS_HASHING_METHOD in ("hash", "content"):
suffix = get_hashed_content(filename)
else:
- raise FilterError('COMPRESS_CSS_HASHING_METHOD is configured '
- 'with an unknown method (%s).' %
- settings.COMPRESS_CSS_HASHING_METHOD)
+ raise FilterError(
+ "COMPRESS_CSS_HASHING_METHOD is configured "
+ "with an unknown method (%s)." % settings.COMPRESS_CSS_HASHING_METHOD
+ )
fragment = None
if "#" in url:
url, fragment = url.rsplit("#", 1)
@@ -93,12 +98,11 @@ class CssAbsoluteFilter(FilterBase):
return url
def _converter(self, url):
- if url.startswith(('#', 'data:')):
+ if url.startswith(("#", "data:")):
return url
elif url.startswith(SCHEMES):
return self.add_suffix(url)
- full_url = posixpath.normpath('/'.join([str(self.directory_name),
- url]))
+ full_url = posixpath.normpath("/".join([str(self.directory_name), url]))
if self.has_scheme:
full_url = "%s%s" % (self.protocol, full_url)
full_url = self.add_suffix(full_url)
@@ -149,11 +153,15 @@ class CssRelativeFilter(CssAbsoluteFilter):
"""
old_prefix = self.url
if self.has_scheme:
- old_prefix = '{}{}'.format(self.protocol, old_prefix)
+ old_prefix = "{}{}".format(self.protocol, old_prefix)
# One level up from 'css' / 'js' folder
- new_prefix = '..'
+ new_prefix = ".."
# N levels up from ``settings.COMPRESS_OUTPUT_DIR``
- new_prefix += '/..' * len(list(filter(
- None, os.path.normpath(settings.COMPRESS_OUTPUT_DIR).split(os.sep)
- )))
- return re.sub('^{}'.format(old_prefix), new_prefix, url)
+ new_prefix += "/.." * len(
+ list(
+ filter(
+ None, os.path.normpath(settings.COMPRESS_OUTPUT_DIR).split(os.sep)
+ )
+ )
+ )
+ return re.sub("^{}".format(old_prefix), new_prefix, url)
diff --git a/compressor/filters/cssmin/__init__.py b/compressor/filters/cssmin/__init__.py
index cc23cd2..282d671 100644
--- a/compressor/filters/cssmin/__init__.py
+++ b/compressor/filters/cssmin/__init__.py
@@ -6,6 +6,7 @@ class CSSCompressorFilter(CallbackOutputFilter):
A filter that utilizes Yury Selivanov's Python port of the YUI CSS
compression algorithm: https://pypi.python.org/pypi/csscompressor
"""
+
callback = "csscompressor.compress"
dependencies = ["csscompressor"]
@@ -13,9 +14,7 @@ class CSSCompressorFilter(CallbackOutputFilter):
class rCSSMinFilter(CallbackOutputFilter):
callback = "rcssmin.cssmin"
dependencies = ["rcssmin"]
- kwargs = {
- "keep_bang_comments": True
- }
+ kwargs = {"keep_bang_comments": True}
# This is for backwards compatibility.
diff --git a/compressor/filters/datauri.py b/compressor/filters/datauri.py
index bd08a7d..ab9bca0 100644
--- a/compressor/filters/datauri.py
+++ b/compressor/filters/datauri.py
@@ -17,6 +17,7 @@ class DataUriFilter(FilterBase):
Don't use this class directly. Use a subclass.
"""
+
def input(self, filename=None, **kwargs):
if not filename or not filename.startswith(settings.COMPRESS_ROOT):
return self.content
@@ -31,18 +32,19 @@ class DataUriFilter(FilterBase):
url = url.split("?")[0]
if "#" in url:
url = url.split("#")[0]
- return os.path.join(
- settings.COMPRESS_ROOT, url[len(settings.COMPRESS_URL):])
+ return os.path.join(settings.COMPRESS_ROOT, url[len(settings.COMPRESS_URL) :])
def data_uri_converter(self, matchobj):
- url = matchobj.group(1).strip(' \'"')
- if not url.startswith('data:') and not url.startswith('//'):
+ url = matchobj.group(1).strip(" '\"")
+ if not url.startswith("data:") and not url.startswith("//"):
path = self.get_file_path(url)
if os.stat(path).st_size <= settings.COMPRESS_DATA_URI_MAX_SIZE:
- with open(path, 'rb') as file:
- data = b64encode(file.read()).decode('ascii')
+ with open(path, "rb") as file:
+ data = b64encode(file.read()).decode("ascii")
return 'url("data:%s;base64,%s")' % (
- mimetypes.guess_type(path)[0], data)
+ mimetypes.guess_type(path)[0],
+ data,
+ )
return 'url("%s")' % url
@@ -51,6 +53,5 @@ class CssDataUriFilter(DataUriFilter):
See DataUriFilter.
"""
- url_patterns = (
- re.compile(r'url\(([^\)]+)\)'),
- )
+
+ url_patterns = (re.compile(r"url\(([^\)]+)\)"),)
diff --git a/compressor/filters/jsmin/__init__.py b/compressor/filters/jsmin/__init__.py
index c1a64cf..a517cfd 100644
--- a/compressor/filters/jsmin/__init__.py
+++ b/compressor/filters/jsmin/__init__.py
@@ -34,11 +34,11 @@ class SlimItFilter(CallbackOutputFilter):
class CalmjsFilter(FilterBase):
def __init__(self, *args, **kwargs):
try:
- self._parser = kwargs.pop('parser')
+ self._parser = kwargs.pop("parser")
except KeyError:
self._parser = None
try:
- self._unparser = kwargs.pop('unparser')
+ self._unparser = kwargs.pop("unparser")
except KeyError:
self._unparser = None
super().__init__(*args, **kwargs)
@@ -56,6 +56,6 @@ class CalmjsFilter(FilterBase):
def output(self, **kwargs):
program = self._parser(self.content)
- minified = u''.join(part.text for part in self._unparser(program))
+ minified = "".join(part.text for part in self._unparser(program))
assert isinstance(minified, str)
return minified
diff --git a/compressor/filters/template.py b/compressor/filters/template.py
index 8bf7365..768cfab 100644
--- a/compressor/filters/template.py
+++ b/compressor/filters/template.py
@@ -5,7 +5,6 @@ from compressor.filters import FilterBase
class TemplateFilter(FilterBase):
-
def input(self, filename=None, basename=None, **kwargs):
template = Template(self.content)
context = Context(settings.COMPRESS_TEMPLATE_FILTER_CONTEXT)
diff --git a/compressor/filters/yuglify.py b/compressor/filters/yuglify.py
index b3a6e4f..940037f 100644
--- a/compressor/filters/yuglify.py
+++ b/compressor/filters/yuglify.py
@@ -7,11 +7,11 @@ class YUglifyFilter(CompilerFilter):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.command += ' --type=%s' % self.type
+ self.command += " --type=%s" % self.type
class YUglifyCSSFilter(YUglifyFilter):
- type = 'css'
+ type = "css"
options = (
("binary", settings.COMPRESS_YUGLIFY_BINARY),
("args", settings.COMPRESS_YUGLIFY_CSS_ARGUMENTS),
@@ -19,7 +19,7 @@ class YUglifyCSSFilter(YUglifyFilter):
class YUglifyJSFilter(YUglifyFilter):
- type = 'js'
+ type = "js"
options = (
("binary", settings.COMPRESS_YUGLIFY_BINARY),
("args", settings.COMPRESS_YUGLIFY_JS_ARGUMENTS),
diff --git a/compressor/filters/yui.py b/compressor/filters/yui.py
index f4beea3..5a91c04 100644
--- a/compressor/filters/yui.py
+++ b/compressor/filters/yui.py
@@ -7,13 +7,13 @@ class YUICompressorFilter(CompilerFilter):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.command += ' --type=%s' % self.type
+ self.command += " --type=%s" % self.type
if self.verbose:
- self.command += ' --verbose'
+ self.command += " --verbose"
class YUICSSFilter(YUICompressorFilter):
- type = 'css'
+ type = "css"
options = (
("binary", settings.COMPRESS_YUI_BINARY),
("args", settings.COMPRESS_YUI_CSS_ARGUMENTS),
@@ -21,7 +21,7 @@ class YUICSSFilter(YUICompressorFilter):
class YUIJSFilter(YUICompressorFilter):
- type = 'js'
+ type = "js"
options = (
("binary", settings.COMPRESS_YUI_BINARY),
("args", settings.COMPRESS_YUI_JS_ARGUMENTS),
diff --git a/compressor/finders.py b/compressor/finders.py
index 7de1fa2..66da32a 100644
--- a/compressor/finders.py
+++ b/compressor/finders.py
@@ -9,6 +9,7 @@ class CompressorFinder(staticfiles.finders.BaseStorageFinder):
with staticfiles development file server or during
deployment.
"""
+
storage = CompressorFileStorage
def list(self, ignore_patterns):
diff --git a/compressor/js.py b/compressor/js.py
index 6d5b29d..0eb2290 100644
--- a/compressor/js.py
+++ b/compressor/js.py
@@ -4,7 +4,7 @@ from compressor.base import Compressor, SOURCE_HUNK, SOURCE_FILE
class JsCompressor(Compressor):
- output_mimetypes = {'text/javascript'}
+ output_mimetypes = {"text/javascript"}
def split_contents(self):
if self.split_content:
@@ -12,22 +12,21 @@ class JsCompressor(Compressor):
self.extra_nodes = []
for elem in self.parser.js_elems():
attribs = self.parser.elem_attribs(elem)
- if 'src' in attribs:
- basename = self.get_basename(attribs['src'])
+ if "src" in attribs:
+ basename = self.get_basename(attribs["src"])
filename = self.get_filename(basename)
content = (SOURCE_FILE, filename, basename, elem)
else:
content = (SOURCE_HUNK, self.parser.elem_content(elem), None, elem)
self.split_content.append(content)
- if 'async' in attribs:
- extra = ' async'
- elif 'defer' in attribs:
- extra = ' defer'
+ if "async" in attribs:
+ extra = " async"
+ elif "defer" in attribs:
+ extra = " defer"
else:
- extra = ''
+ extra = ""
# Append to the previous node if it had the same attribute
- append_to_previous = (self.extra_nodes
- and self.extra_nodes[-1][0] == extra)
+ append_to_previous = self.extra_nodes and self.extra_nodes[-1][0] == extra
if append_to_previous and settings.COMPRESS_ENABLED:
self.extra_nodes[-1][1].split_content.append(content)
else:
@@ -37,15 +36,18 @@ class JsCompressor(Compressor):
return self.split_content
def output(self, *args, **kwargs):
- if (settings.COMPRESS_ENABLED or settings.COMPRESS_PRECOMPILERS
- or kwargs.get('forced', False)):
+ if (
+ settings.COMPRESS_ENABLED
+ or settings.COMPRESS_PRECOMPILERS
+ or kwargs.get("forced", False)
+ ):
self.split_contents()
- if hasattr(self, 'extra_nodes'):
+ if hasattr(self, "extra_nodes"):
ret = []
for extra, subnode in self.extra_nodes:
- subnode.extra_context.update({'extra': extra})
+ subnode.extra_context.update({"extra": extra})
ret.append(subnode.output(*args, **kwargs))
- return '\n'.join(ret)
+ return "\n".join(ret)
return super().output(*args, **kwargs)
def filter_input(self, forced=False):
diff --git a/compressor/management/commands/compress.py b/compressor/management/commands/compress.py
index aa09a27..23c35a2 100644
--- a/compressor/management/commands/compress.py
+++ b/compressor/management/commands/compress.py
@@ -13,17 +13,27 @@ from django.core.management.base import BaseCommand, CommandError
import django.template
from django.template import Context
from django.utils.encoding import smart_str
-from django.template.loader import get_template # noqa Leave this in to preload template locations
+from django.template.loader import (
+ get_template,
+) # noqa Leave this in to preload template locations
from django.template import engines
-from compressor.cache import get_offline_hexdigest, write_offline_manifest, get_offline_manifest
+from compressor.cache import (
+ get_offline_hexdigest,
+ write_offline_manifest,
+ get_offline_manifest,
+)
from compressor.conf import settings
-from compressor.exceptions import (OfflineGenerationError, TemplateSyntaxError,
- TemplateDoesNotExist)
+from compressor.exceptions import (
+ OfflineGenerationError,
+ TemplateSyntaxError,
+ TemplateDoesNotExist,
+)
from compressor.utils import get_mod_func
offline_manifest_lock = Lock()
+
class Command(BaseCommand):
help = "Compress content outside of the request/response cycle"
@@ -33,30 +43,51 @@ class Command(BaseCommand):
requires_system_checks = False
def add_arguments(self, parser):
- parser.add_argument('--extension', '-e', action='append', dest='extensions',
- help='The file extension(s) to examine (default: ".html", '
- 'separate multiple extensions with commas, or use -e '
- 'multiple times)')
- parser.add_argument('-f', '--force', default=False, action='store_true',
- help="Force the generation of compressed content even if the "
- "COMPRESS_ENABLED setting is not True.", dest='force')
- parser.add_argument('--follow-links', default=False, action='store_true',
- help="Follow symlinks when traversing the COMPRESS_ROOT "
- "(which defaults to STATIC_ROOT). Be aware that using this "
- "can lead to infinite recursion if a link points to a parent "
- "directory of itself.", dest='follow_links')
- parser.add_argument('--engine', default=[], action="append",
- help="Specifies the templating engine. jinja2 and django are "
- "supported. It may be a specified more than once for "
- "multiple engines. If not specified, django engine is used.",
- dest="engines")
+ parser.add_argument(
+ "--extension",
+ "-e",
+ action="append",
+ dest="extensions",
+ help='The file extension(s) to examine (default: ".html", '
+ "separate multiple extensions with commas, or use -e "
+ "multiple times)",
+ )
+ parser.add_argument(
+ "-f",
+ "--force",
+ default=False,
+ action="store_true",
+ help="Force the generation of compressed content even if the "
+ "COMPRESS_ENABLED setting is not True.",
+ dest="force",
+ )
+ parser.add_argument(
+ "--follow-links",
+ default=False,
+ action="store_true",
+ help="Follow symlinks when traversing the COMPRESS_ROOT "
+ "(which defaults to STATIC_ROOT). Be aware that using this "
+ "can lead to infinite recursion if a link points to a parent "
+ "directory of itself.",
+ dest="follow_links",
+ )
+ parser.add_argument(
+ "--engine",
+ default=[],
+ action="append",
+ help="Specifies the templating engine. jinja2 and django are "
+ "supported. It may be a specified more than once for "
+ "multiple engines. If not specified, django engine is used.",
+ dest="engines",
+ )
def get_loaders(self):
template_source_loaders = []
for e in engines.all():
- if hasattr(e, 'engine'):
+ if hasattr(e, "engine"):
template_source_loaders.extend(
- e.engine.get_template_loaders(e.engine.loaders))
+ e.engine.get_template_loaders(e.engine.loaders)
+ )
loaders = []
# If template loader is CachedTemplateLoader, return the loaders
# that it wraps around. So if we have
@@ -71,7 +102,7 @@ class Command(BaseCommand):
# The cached Loader and similar ones include a 'loaders' attribute
# so we look for that.
for loader in template_source_loaders:
- if hasattr(loader, 'loaders'):
+ if hasattr(loader, "loaders"):
loaders.extend(loader.loaders)
else:
loaders.append(loader)
@@ -79,15 +110,16 @@ class Command(BaseCommand):
def __get_parser(self, engine):
charset = (
- settings.FILE_CHARSET if settings.is_overridden('FILE_CHARSET')
- else 'utf-8'
+ settings.FILE_CHARSET if settings.is_overridden("FILE_CHARSET") else "utf-8"
)
if engine == "jinja2":
from compressor.offline.jinja2 import Jinja2Parser
+
env = settings.COMPRESS_JINJA2_GET_ENVIRONMENT()
parser = Jinja2Parser(charset=charset, env=env)
elif engine == "django":
from compressor.offline.django import DjangoParser
+
parser = DjangoParser(charset=charset)
else:
raise OfflineGenerationError("Invalid templating engine specified.")
@@ -104,51 +136,67 @@ class Command(BaseCommand):
"""
if not self.get_loaders():
- raise OfflineGenerationError("No template loaders defined. You "
- "must set TEMPLATE_LOADERS in your "
- "settings or set 'loaders' in your "
- "TEMPLATES dictionary.")
+ raise OfflineGenerationError(
+ "No template loaders defined. You "
+ "must set TEMPLATE_LOADERS in your "
+ "settings or set 'loaders' in your "
+ "TEMPLATES dictionary."
+ )
templates = set()
- if engine == 'django':
+ if engine == "django":
paths = set()
for loader in self.get_loaders():
try:
module = import_module(loader.__module__)
- get_template_sources = getattr(module,
- 'get_template_sources', None)
+ get_template_sources = getattr(module, "get_template_sources", None)
if get_template_sources is None:
get_template_sources = loader.get_template_sources
- paths.update(smart_str(origin) for origin in get_template_sources(''))
+ paths.update(
+ smart_str(origin) for origin in get_template_sources("")
+ )
except (ImportError, AttributeError, TypeError):
# Yeah, this didn't work out so well, let's move on
pass
if not paths:
- raise OfflineGenerationError("No template paths found. None of "
- "the configured template loaders "
- "provided template paths. See "
- "https://docs.djangoproject.com/en/2.1/topics/templates/ "
- "for more information on template "
- "loaders.")
+ raise OfflineGenerationError(
+ "No template paths found. None of "
+ "the configured template loaders "
+ "provided template paths. See "
+ "https://docs.djangoproject.com/en/2.1/topics/templates/ "
+ "for more information on template "
+ "loaders."
+ )
if verbosity >= 2:
log.write("Considering paths:\n\t" + "\n\t".join(paths) + "\n")
for path in paths:
for root, dirs, files in os.walk(path, followlinks=follow_links):
- templates.update(os.path.relpath(os.path.join(root, name), path)
- for name in files if not name.startswith('.') and
- any(fnmatch(name, "*%s" % glob) for glob in extensions))
- elif engine == 'jinja2':
+ templates.update(
+ os.path.relpath(os.path.join(root, name), path)
+ for name in files
+ if not name.startswith(".")
+ and any(fnmatch(name, "*%s" % glob) for glob in extensions)
+ )
+ elif engine == "jinja2":
env = settings.COMPRESS_JINJA2_GET_ENVIRONMENT()
- if env and hasattr(env, 'list_templates'):
- templates |= set([env.loader.get_source(env, template)[1] for template in
- env.list_templates(filter_func=lambda _path:
- os.path.splitext(_path)[-1] in extensions)])
+ if env and hasattr(env, "list_templates"):
+ templates |= set(
+ [
+ env.loader.get_source(env, template)[1]
+ for template in env.list_templates(
+ filter_func=lambda _path: os.path.splitext(_path)[-1]
+ in extensions
+ )
+ ]
+ )
if not templates:
- raise OfflineGenerationError("No templates found. Make sure your "
- "TEMPLATE_LOADERS and TEMPLATE_DIRS "
- "settings are correct.")
+ raise OfflineGenerationError(
+ "No templates found. Make sure your "
+ "TEMPLATE_LOADERS and TEMPLATE_DIRS "
+ "settings are correct."
+ )
if verbosity >= 2:
log.write("Found templates:\n\t" + "\n\t".join(templates) + "\n")
@@ -158,8 +206,10 @@ class Command(BaseCommand):
module, function = get_mod_func(contexts)
contexts = getattr(import_module(module), function)()
except (AttributeError, ImportError, TypeError) as e:
- raise ImportError("Couldn't import offline context function %s: %s" %
- (settings.COMPRESS_OFFLINE_CONTEXT, e))
+ raise ImportError(
+ "Couldn't import offline context function %s: %s"
+ % (settings.COMPRESS_OFFLINE_CONTEXT, e)
+ )
elif not isinstance(contexts, (list, tuple)):
contexts = [contexts]
@@ -180,7 +230,9 @@ class Command(BaseCommand):
continue
except TemplateSyntaxError as e: # broken template -> ignore
if verbosity >= 1:
- log.write("Invalid template %s: %s\n" % (template_name, smart_str(e)))
+ log.write(
+ "Invalid template %s: %s\n" % (template_name, smart_str(e))
+ )
continue
except TemplateDoesNotExist: # non existent template -> ignore
if verbosity >= 1:
@@ -188,8 +240,10 @@ class Command(BaseCommand):
continue
except UnicodeDecodeError:
if verbosity >= 1:
- log.write("UnicodeDecodeError while trying to read "
- "template %s\n" % template_name)
+ log.write(
+ "UnicodeDecodeError while trying to read "
+ "template %s\n" % template_name
+ )
continue
contexts_count = 0
@@ -207,12 +261,16 @@ class Command(BaseCommand):
except (TemplateDoesNotExist, TemplateSyntaxError) as e:
# Could be an error in some base template
if verbosity >= 1:
- log.write("Error parsing template %s: %s\n" %
- (template.template_name, smart_str(e)))
+ log.write(
+ "Error parsing template %s: %s\n"
+ % (template.template_name, smart_str(e))
+ )
continue
if nodes:
- template_nodes = compressor_nodes.setdefault(template, OrderedDict())
+ template_nodes = compressor_nodes.setdefault(
+ template, OrderedDict()
+ )
for node in nodes:
nodes_count += 1
template_nodes.setdefault(node, []).append(context)
@@ -222,7 +280,14 @@ class Command(BaseCommand):
template._log = log
template._log_verbosity = verbosity
- pool.submit(self._compress_template, offline_manifest, nodes, parser, template, errors)
+ pool.submit(
+ self._compress_template,
+ offline_manifest,
+ nodes,
+ parser,
+ template,
+ errors,
+ )
pool.shutdown(wait=True)
contexts_count += 1
@@ -234,11 +299,14 @@ class Command(BaseCommand):
raise OfflineGenerationError(
"No 'compress' template tags found in templates."
"Try running compress command with --follow-links and/or"
- "--extension=EXTENSIONS")
+ "--extension=EXTENSIONS"
+ )
if verbosity >= 1:
- log.write("done\nCompressed %d block(s) from %d template(s) for %d context(s).\n" %
- (len(offline_manifest), nodes_count, contexts_count))
+ log.write(
+ "done\nCompressed %d block(s) from %d template(s) for %d context(s).\n"
+ % (len(offline_manifest), nodes_count, contexts_count)
+ )
return offline_manifest, len(offline_manifest), offline_manifest.values()
@staticmethod
@@ -266,8 +334,12 @@ class Command(BaseCommand):
try:
result = parser.render_node(template, context, node)
except Exception as e:
- errors.append(CommandError("An error occurred during rendering %s: "
- "%s" % (template.template_name, smart_str(e))))
+ errors.append(
+ CommandError(
+ "An error occurred during rendering %s: "
+ "%s" % (template.template_name, smart_str(e))
+ )
+ )
del offline_manifest[key]
return
result = result.replace(
@@ -276,7 +348,7 @@ class Command(BaseCommand):
offline_manifest[key] = result
context.pop()
- def handle_extensions(self, extensions=('html',)):
+ def handle_extensions(self, extensions=("html",)):
"""
organizes multiple extensions that are separated with commas or
passed by using --extension/-e multiple times.
@@ -291,10 +363,10 @@ class Command(BaseCommand):
"""
ext_list = []
for ext in extensions:
- ext_list.extend(ext.replace(' ', '').split(','))
+ ext_list.extend(ext.replace(" ", "").split(","))
for i, ext in enumerate(ext_list):
- if not ext.startswith('.'):
- ext_list[i] = '.%s' % ext_list[i]
+ if not ext.startswith("."):
+ ext_list[i] = ".%s" % ext_list[i]
return set(ext_list)
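Note: for reference, the normalization handle_extensions performs, traced on hypothetical inputs:

extensions = ["html", "js,css", ".txt"]  # e.g. from repeated --extension flags
ext_list = []
for ext in extensions:
    ext_list.extend(ext.replace(" ", "").split(","))
for i, ext in enumerate(ext_list):
    if not ext.startswith("."):
        ext_list[i] = ".%s" % ext_list[i]
assert set(ext_list) == {".html", ".js", ".css", ".txt"}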
def handle(self, **options):
@@ -304,12 +376,14 @@ class Command(BaseCommand):
if not settings.COMPRESS_ENABLED and not options.get("force"):
raise CommandError(
"Compressor is disabled. Set the COMPRESS_ENABLED "
- "setting or use --force to override.")
+ "setting or use --force to override."
+ )
if not settings.COMPRESS_OFFLINE:
if not options.get("force"):
raise CommandError(
"Offline compression is disabled. Set "
- "COMPRESS_OFFLINE or use the --force to override.")
+ "COMPRESS_OFFLINE or use the --force to override."
+ )
log = options.get("log", sys.stdout)
verbosity = options.get("verbosity", 1)
@@ -321,7 +395,9 @@ class Command(BaseCommand):
final_block_count = 0
final_results = []
for engine in engines:
- offline_manifest, block_count, results = self.compress(engine, extensions, verbosity, follow_links, log)
+ offline_manifest, block_count, results = self.compress(
+ engine, extensions, verbosity, follow_links, log
+ )
final_results.extend(results)
final_block_count += block_count
final_offline_manifest.update(offline_manifest)
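Note: a hedged sketch of driving the command above programmatically; the setting names come from the checks in handle(), and `force`/`verbosity` mirror the command's options. It assumes a configured Django project with django-compressor installed.

from django.core.management import call_command

# settings.py (excerpt)
# COMPRESS_ENABLED = True
# COMPRESS_OFFLINE = True

# Roughly equivalent to: python manage.py compress --force -v 2
call_command("compress", force=True, verbosity=2)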
diff --git a/compressor/management/commands/mtime_cache.py b/compressor/management/commands/mtime_cache.py
index d90ebfc..e4ce697 100644
--- a/compressor/management/commands/mtime_cache.py
+++ b/compressor/management/commands/mtime_cache.py
@@ -12,27 +12,38 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
- '-i', '--ignore', action='append', default=[],
- dest='ignore_patterns', metavar='PATTERN',
+ "-i",
+ "--ignore",
+ action="append",
+ default=[],
+ dest="ignore_patterns",
+ metavar="PATTERN",
help="Ignore files or directories matching this glob-style "
- "pattern. Use multiple times to ignore more."),
+ "pattern. Use multiple times to ignore more.",
+ ),
parser.add_argument(
- '--no-default-ignore', action='store_false',
- dest='use_default_ignore_patterns', default=True,
+ "--no-default-ignore",
+ action="store_false",
+ dest="use_default_ignore_patterns",
+ default=True,
help="Don't ignore the common private glob-style patterns 'CVS', "
- "'.*' and '*~'."),
+ "'.*' and '*~'.",
+ ),
parser.add_argument(
- '--follow-links', dest='follow_links', action='store_true',
+ "--follow-links",
+ dest="follow_links",
+ action="store_true",
help="Follow symlinks when traversing the COMPRESS_ROOT "
- "(which defaults to STATIC_ROOT). Be aware that using this "
- "can lead to infinite recursion if a link points to a parent "
- "directory of itself."),
+ "(which defaults to STATIC_ROOT). Be aware that using this "
+ "can lead to infinite recursion if a link points to a parent "
+ "directory of itself.",
+ ),
parser.add_argument(
- '-c', '--clean', dest='clean', action='store_true',
- help="Remove all items"),
+ "-c", "--clean", dest="clean", action="store_true", help="Remove all items"
+ ),
parser.add_argument(
- '-a', '--add', dest='add', action='store_true',
- help="Add all items"),
+ "-a", "--add", dest="add", action="store_true", help="Add all items"
+ ),
def is_ignored(self, path):
"""
@@ -45,47 +56,50 @@ class Command(BaseCommand):
return False
def handle(self, **options):
- ignore_patterns = options['ignore_patterns']
- if options['use_default_ignore_patterns']:
- ignore_patterns += ['CVS', '.*', '*~']
- options['ignore_patterns'] = ignore_patterns
+ ignore_patterns = options["ignore_patterns"]
+ if options["use_default_ignore_patterns"]:
+ ignore_patterns += ["CVS", ".*", "*~"]
+ options["ignore_patterns"] = ignore_patterns
self.ignore_patterns = ignore_patterns
- if ((options['add'] and options['clean'])
- or (not options['add'] and not options['clean'])):
+ if (options["add"] and options["clean"]) or (
+ not options["add"] and not options["clean"]
+ ):
raise CommandError('Please specify either "--add" or "--clean"')
if not settings.COMPRESS_MTIME_DELAY:
raise CommandError(
- 'mtime caching is currently disabled. Please '
- 'set the COMPRESS_MTIME_DELAY setting to a number of seconds.')
+ "mtime caching is currently disabled. Please "
+ "set the COMPRESS_MTIME_DELAY setting to a number of seconds."
+ )
files_to_add = set()
keys_to_delete = set()
- for root, dirs, files in os.walk(settings.COMPRESS_ROOT,
- followlinks=options['follow_links']):
+ for root, dirs, files in os.walk(
+ settings.COMPRESS_ROOT, followlinks=options["follow_links"]
+ ):
for dir_ in dirs:
if self.is_ignored(dir_):
dirs.remove(dir_)
for filename in files:
common = "".join(root.split(settings.COMPRESS_ROOT))
if common.startswith(os.sep):
- common = common[len(os.sep):]
+ common = common[len(os.sep) :]
if self.is_ignored(os.path.join(common, filename)):
continue
filename = os.path.join(root, filename)
keys_to_delete.add(get_mtime_cachekey(filename))
- if options['add']:
+ if options["add"]:
files_to_add.add(filename)
if keys_to_delete:
cache.delete_many(list(keys_to_delete))
- self.stdout.write("Deleted mtimes of %d files from the cache."
- % len(keys_to_delete))
+ self.stdout.write(
+ "Deleted mtimes of %d files from the cache." % len(keys_to_delete)
+ )
if files_to_add:
for filename in files_to_add:
get_mtime(filename)
- self.stdout.write("Added mtimes of %d files to cache."
- % len(files_to_add))
+ self.stdout.write("Added mtimes of %d files to cache." % len(files_to_add))
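Note: illustrative use of the mtime_cache command above; the `add`/`clean` option names come from add_arguments(), and COMPRESS_MTIME_DELAY must be set or the command raises CommandError.

from django.core.management import call_command

# settings.py (excerpt)
# COMPRESS_MTIME_DELAY = 60  # seconds

call_command("mtime_cache", add=True)    # prime mtimes for files under COMPRESS_ROOT
call_command("mtime_cache", clean=True)  # later, drop the cached mtimes again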
diff --git a/compressor/offline/django.py b/compressor/offline/django.py
index 1afdb0e..619c130 100644
--- a/compressor/offline/django.py
+++ b/compressor/offline/django.py
@@ -5,7 +5,12 @@ from django.template import Context
from django.template.base import Node, VariableNode, TextNode, NodeList
from django.template.defaulttags import IfNode
from django.template.loader import get_template
-from django.template.loader_tags import BLOCK_CONTEXT_KEY, ExtendsNode, BlockNode, BlockContext
+from django.template.loader_tags import (
+ BLOCK_CONTEXT_KEY,
+ ExtendsNode,
+ BlockNode,
+ BlockContext,
+)
from compressor.exceptions import TemplateSyntaxError, TemplateDoesNotExist
@@ -20,8 +25,9 @@ def handle_extendsnode(extendsnode, context):
if BLOCK_CONTEXT_KEY not in context.render_context:
context.render_context[BLOCK_CONTEXT_KEY] = BlockContext()
block_context = context.render_context[BLOCK_CONTEXT_KEY]
- blocks = dict((n.name, n) for n in
- extendsnode.nodelist.get_nodes_by_type(BlockNode))
+ blocks = dict(
+ (n.name, n) for n in extendsnode.nodelist.get_nodes_by_type(BlockNode)
+ )
block_context.add_blocks(blocks)
compiled_parent = extendsnode.get_parent(context)
@@ -34,8 +40,7 @@ def handle_extendsnode(extendsnode, context):
return handle_extendsnode(node, context)
break
# Add blocks of the root template to block context.
- blocks = dict((n.name, n) for n in
- parent_nodelist.get_nodes_by_type(BlockNode))
+ blocks = dict((n.name, n) for n in parent_nodelist.get_nodes_by_type(BlockNode))
block_context.add_blocks(blocks)
block_stack = []
@@ -48,7 +53,7 @@ def remove_block_nodes(nodelist, block_stack, block_context):
for node in nodelist:
if isinstance(node, VariableNode):
var_name = node.filter_expression.token.strip()
- if var_name == 'block.super':
+ if var_name == "block.super":
if not block_stack:
continue
node = block_context.get_block(block_stack[-1].name)
@@ -61,14 +66,20 @@ def remove_block_nodes(nodelist, block_stack, block_context):
# IfNode has nodelist as a @property so we can not modify it
if isinstance(node, IfNode):
node = copy(node)
- for i, (condition, sub_nodelist) in enumerate(node.conditions_nodelists):
- sub_nodelist = remove_block_nodes(sub_nodelist, block_stack, block_context)
+ for i, (condition, sub_nodelist) in enumerate(
+ node.conditions_nodelists
+ ):
+ sub_nodelist = remove_block_nodes(
+ sub_nodelist, block_stack, block_context
+ )
node.conditions_nodelists[i] = (condition, sub_nodelist)
else:
for attr in node.child_nodelists:
sub_nodelist = getattr(node, attr, None)
if sub_nodelist:
- sub_nodelist = remove_block_nodes(sub_nodelist, block_stack, block_context)
+ sub_nodelist = remove_block_nodes(
+ sub_nodelist, block_stack, block_context
+ )
node = copy(node)
setattr(node, attr, sub_nodelist)
new_nodelist.append(node)
@@ -135,15 +146,16 @@ class DjangoParser:
# and linked issues/PRs for a discussion on the `None) or []` part
nodelist += getattr(node, attr, None) or []
else:
- nodelist = getattr(node, 'nodelist', [])
+ nodelist = getattr(node, "nodelist", [])
return nodelist
def walk_nodes(self, node, original=None, context=None):
if original is None:
original = node
for node in self.get_nodelist(node, original, context):
- if isinstance(node, CompressorNode) \
- and node.is_offline_compression_enabled(forced=True):
+ if isinstance(node, CompressorNode) and node.is_offline_compression_enabled(
+ forced=True
+ ):
yield node
else:
for node in self.walk_nodes(node, original, context):
diff --git a/compressor/offline/jinja2.py b/compressor/offline/jinja2.py
index 03ceaf3..9a27bb8 100644
--- a/compressor/offline/jinja2.py
+++ b/compressor/offline/jinja2.py
@@ -10,7 +10,7 @@ from compressor.exceptions import TemplateSyntaxError, TemplateDoesNotExist
def flatten_context(context):
- if hasattr(context, 'dicts'):
+ if hasattr(context, "dicts"):
context_dict = {}
for d in context.dicts:
@@ -28,14 +28,15 @@ class SpacelessExtension(Extension):
See: https://github.com/django/django/blob/master/django/template/defaulttags.py
"""
- tags = set(['spaceless'])
+ tags = set(["spaceless"])
def parse(self, parser):
lineno = next(parser.stream).lineno
- body = parser.parse_statements(['name:endspaceless'], drop_needle=True)
+ body = parser.parse_statements(["name:endspaceless"], drop_needle=True)
- return nodes.CallBlock(self.call_method('_spaceless', []),
- [], [], body).set_lineno(lineno)
+ return nodes.CallBlock(
+ self.call_method("_spaceless", []), [], [], body
+ ).set_lineno(lineno)
def _spaceless(self, caller):
from django.utils.html import strip_spaces_between_tags
@@ -60,14 +61,14 @@ def url_for(mod, filename):
class Jinja2Parser:
- COMPRESSOR_ID = 'compressor.contrib.jinja2ext.CompressorExtension'
+ COMPRESSOR_ID = "compressor.contrib.jinja2ext.CompressorExtension"
def __init__(self, charset, env):
self.charset = charset
self.env = env
def parse(self, template_name):
- with io.open(template_name, mode='rb') as file:
+ with io.open(template_name, mode="rb") as file:
try:
template = self.env.parse(file.read().decode(self.charset))
except jinja2.TemplateSyntaxError as e:
@@ -117,11 +118,13 @@ class Jinja2Parser:
def walk_nodes(self, node, block_name=None, context=None):
for node in self.get_nodelist(node):
- if (isinstance(node, CallBlock)
- and isinstance(node.call, Call)
- and isinstance(node.call.node, ExtensionAttribute)
- and node.call.node.identifier == self.COMPRESSOR_ID):
- node.call.node.name = '_compress_forced'
+ if (
+ isinstance(node, CallBlock)
+ and isinstance(node.call, Call)
+ and isinstance(node.call.node, ExtensionAttribute)
+ and node.call.node.identifier == self.COMPRESSOR_ID
+ ):
+ node.call.node.name = "_compress_forced"
yield node
else:
for node in self.walk_nodes(node, block_name=block_name):
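Note: a hedged sketch of the callable that COMPRESS_JINJA2_GET_ENVIRONMENT is expected to return an environment from, wiring in the extension whose identifier is referenced above; the loader path is illustrative.

import jinja2

def jinja2_env():
    return jinja2.Environment(
        loader=jinja2.FileSystemLoader("templates"),
        extensions=["compressor.contrib.jinja2ext.CompressorExtension"],
    )

# settings.py (excerpt)
# COMPRESS_JINJA2_GET_ENVIRONMENT = jinja2_env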
diff --git a/compressor/parser/__init__.py b/compressor/parser/__init__.py
index 288b031..9ef6069 100644
--- a/compressor/parser/__init__.py
+++ b/compressor/parser/__init__.py
@@ -15,7 +15,7 @@ class AutoSelectParser(LazyObject):
options = (
# TODO: make lxml.html parser first again
(html.parser.__name__, HtmlParser), # fast and part of the Python stdlib
- ('lxml.html', LxmlParser), # lxml, extremely fast
+ ("lxml.html", LxmlParser), # lxml, extremely fast
)
def __init__(self, content):
diff --git a/compressor/parser/base.py b/compressor/parser/base.py
index de44dfa..2abc570 100644
--- a/compressor/parser/base.py
+++ b/compressor/parser/base.py
@@ -2,6 +2,7 @@ class ParserBase:
"""
Base parser to be subclassed when creating an own parser.
"""
+
def __init__(self, content):
self.content = content
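Note: the docstring above says ParserBase is meant to be subclassed. A hedged sketch of a custom parser implementing the hooks the bundled parsers provide (css_elems, js_elems, elem_attribs, elem_content, elem_name, elem_str); the regex-based logic is purely illustrative.

import re

from compressor.parser import ParserBase


class NaiveScriptParser(ParserBase):
    """Toy parser that only recognizes <script> elements."""

    def css_elems(self):
        return []

    def js_elems(self):
        return re.findall(r"<script[^>]*>.*?</script>", self.content, re.S)

    def elem_attribs(self, elem):
        return dict(re.findall(r'(\w+)="([^"]*)"', elem))

    def elem_content(self, elem):
        return re.sub(r"</?script[^>]*>", "", elem)

    def elem_name(self, elem):
        return "script"

    def elem_str(self, elem):
        return elem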
diff --git a/compressor/parser/beautifulsoup.py b/compressor/parser/beautifulsoup.py
index f3b1809..9189748 100644
--- a/compressor/parser/beautifulsoup.py
+++ b/compressor/parser/beautifulsoup.py
@@ -5,20 +5,20 @@ from compressor.parser import ParserBase
class BeautifulSoupParser(ParserBase):
-
def __init__(self, content):
super().__init__(content)
try:
from bs4 import BeautifulSoup
+
self.soup = BeautifulSoup(self.content, "html.parser")
except ImportError as err:
raise ImproperlyConfigured("Error while importing BeautifulSoup: %s" % err)
def css_elems(self):
- return self.soup.find_all({'link': True, 'style': True})
+ return self.soup.find_all({"link": True, "style": True})
def js_elems(self):
- return self.soup.find_all('script')
+ return self.soup.find_all("script")
def elem_attribs(self, elem):
attrs = dict(elem.attrs)
@@ -37,7 +37,7 @@ class BeautifulSoupParser(ParserBase):
def elem_str(self, elem):
elem_as_string = smart_str(elem)
- if elem.name == 'link':
+ if elem.name == "link":
# This makes testcases happy
- elem_as_string = elem_as_string.replace('/>', '>')
+ elem_as_string = elem_as_string.replace("/>", ">")
return elem_as_string
diff --git a/compressor/parser/default_htmlparser.py b/compressor/parser/default_htmlparser.py
index e85bc97..1e71dc4 100644
--- a/compressor/parser/default_htmlparser.py
+++ b/compressor/parser/default_htmlparser.py
@@ -9,7 +9,7 @@ from compressor.parser import ParserBase
# The HTMLParser constructor takes a 'convert_charrefs'
# argument which raises a warning if we don't pass it.
HTML_PARSER_ARGS = {
- 'convert_charrefs': False,
+ "convert_charrefs": False,
}
@@ -26,39 +26,35 @@ class DefaultHtmlParser(ParserBase, html.parser.HTMLParser):
except Exception as err:
lineno = err.lineno
line = self.content.splitlines()[lineno]
- raise ParserError("Error while initializing HtmlParser: %s (line: %s)" % (err, repr(line)))
+ raise ParserError(
+ "Error while initializing HtmlParser: %s (line: %s)" % (err, repr(line))
+ )
def handle_starttag(self, tag, attrs):
tag = tag.lower()
- if tag in ('style', 'script'):
- if tag == 'style':
+ if tag in ("style", "script"):
+ if tag == "style":
tags = self._css_elems
- elif tag == 'script':
+ elif tag == "script":
tags = self._js_elems
- tags.append({
- 'tag': tag,
- 'attrs': attrs,
- 'attrs_dict': dict(attrs),
- 'text': ''
- })
+ tags.append(
+ {"tag": tag, "attrs": attrs, "attrs_dict": dict(attrs), "text": ""}
+ )
self._current_tag = tag
- elif tag == 'link':
- self._css_elems.append({
- 'tag': tag,
- 'attrs': attrs,
- 'attrs_dict': dict(attrs),
- 'text': None
- })
+ elif tag == "link":
+ self._css_elems.append(
+ {"tag": tag, "attrs": attrs, "attrs_dict": dict(attrs), "text": None}
+ )
def handle_endtag(self, tag):
if self._current_tag and self._current_tag == tag.lower():
self._current_tag = None
def handle_data(self, data):
- if self._current_tag == 'style':
- self._css_elems[-1]['text'] = data
- elif self._current_tag == 'script':
- self._js_elems[-1]['text'] = data
+ if self._current_tag == "style":
+ self._css_elems[-1]["text"] = data
+ elif self._current_tag == "script":
+ self._js_elems[-1]["text"] = data
def css_elems(self):
return self._css_elems
@@ -67,21 +63,23 @@ class DefaultHtmlParser(ParserBase, html.parser.HTMLParser):
return self._js_elems
def elem_name(self, elem):
- return elem['tag']
+ return elem["tag"]
def elem_attribs(self, elem):
- return elem['attrs_dict']
+ return elem["attrs_dict"]
def elem_content(self, elem):
- return smart_str(elem['text'])
+ return smart_str(elem["text"])
def elem_str(self, elem):
tag = {}
tag.update(elem)
- tag['attrs'] = ''
- if len(elem['attrs']):
- tag['attrs'] = ' %s' % ' '.join(['%s="%s"' % (name, value) for name, value in elem['attrs']])
- if elem['tag'] == 'link':
- return '<%(tag)s%(attrs)s>' % tag
+ tag["attrs"] = ""
+ if len(elem["attrs"]):
+ tag["attrs"] = " %s" % " ".join(
+ ['%s="%s"' % (name, value) for name, value in elem["attrs"]]
+ )
+ if elem["tag"] == "link":
+ return "<%(tag)s%(attrs)s>" % tag
else:
- return '<%(tag)s%(attrs)s>%(text)s</%(tag)s>' % tag
+ return "<%(tag)s%(attrs)s>%(text)s</%(tag)s>" % tag
diff --git a/compressor/parser/html5lib.py b/compressor/parser/html5lib.py
index 0d85b96..0a96750 100644
--- a/compressor/parser/html5lib.py
+++ b/compressor/parser/html5lib.py
@@ -7,15 +7,17 @@ from compressor.parser import ParserBase
class Html5LibParser(ParserBase):
-
def __init__(self, content):
super().__init__(content)
import html5lib
+
self.html5lib = html5lib
def _serialize(self, elem):
return self.html5lib.serialize(
- elem, tree="etree", quote_attr_values="always",
+ elem,
+ tree="etree",
+ quote_attr_values="always",
omit_optional_tags=False,
)
@@ -34,11 +36,12 @@ class Html5LibParser(ParserBase):
raise ParserError("Error while initializing Parser: %s" % err)
def css_elems(self):
- return self._find('{http://www.w3.org/1999/xhtml}link',
- '{http://www.w3.org/1999/xhtml}style')
+ return self._find(
+ "{http://www.w3.org/1999/xhtml}link", "{http://www.w3.org/1999/xhtml}style"
+ )
def js_elems(self):
- return self._find('{http://www.w3.org/1999/xhtml}script')
+ return self._find("{http://www.w3.org/1999/xhtml}script")
def elem_attribs(self, elem):
return elem.attrib
@@ -47,8 +50,8 @@ class Html5LibParser(ParserBase):
return smart_str(elem.text)
def elem_name(self, elem):
- if '}' in elem.tag:
- return elem.tag.split('}')[1]
+ if "}" in elem.tag:
+ return elem.tag.split("}")[1]
return elem.tag
def elem_str(self, elem):
diff --git a/compressor/parser/lxml.py b/compressor/parser/lxml.py
index 6fdbb0e..82d5601 100644
--- a/compressor/parser/lxml.py
+++ b/compressor/parser/lxml.py
@@ -11,6 +11,7 @@ class LxmlParser(ParserBase):
LxmlParser will use `lxml.html` parser to parse rendered contents of
{% compress %} tag.
"""
+
def __init__(self, content):
try:
from lxml.html import fromstring
@@ -29,17 +30,19 @@ class LxmlParser(ParserBase):
"""
Document tree.
"""
- content = '<root>%s</root>' % self.content
+ content = "<root>%s</root>" % self.content
tree = self.fromstring(content)
self.tostring(tree, encoding=str)
return tree
def css_elems(self):
- return self.tree.xpath('//link[re:test(@rel, "^stylesheet$", "i")]|style',
- namespaces={"re": "http://exslt.org/regular-expressions"})
+ return self.tree.xpath(
+ '//link[re:test(@rel, "^stylesheet$", "i")]|style',
+ namespaces={"re": "http://exslt.org/regular-expressions"},
+ )
def js_elems(self):
- return self.tree.findall('script')
+ return self.tree.findall("script")
def elem_attribs(self, elem):
return elem.attrib
@@ -51,4 +54,4 @@ class LxmlParser(ParserBase):
return elem.tag
def elem_str(self, elem):
- return smart_str(self.tostring(elem, method='html', encoding=str))
+ return smart_str(self.tostring(elem, method="html", encoding=str))
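Note: a quick, hedged illustration of the parser API shown above (requires lxml to be installed):

from compressor.parser.lxml import LxmlParser

parser = LxmlParser('<script src="/static/js/one.js"></script>')
for elem in parser.js_elems():
    print(parser.elem_name(elem), parser.elem_attribs(elem))
# expected output (roughly): script {'src': '/static/js/one.js'}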
diff --git a/compressor/storage.py b/compressor/storage.py
index 1bf58dd..f14ed9d 100644
--- a/compressor/storage.py
+++ b/compressor/storage.py
@@ -18,6 +18,7 @@ class CompressorFileStorage(FileSystemStorage):
``COMPRESS_URL``.
"""
+
def __init__(self, location=None, base_url=None, *args, **kwargs):
if location is None:
location = settings.COMPRESS_ROOT
@@ -47,20 +48,22 @@ class CompressorFileStorage(FileSystemStorage):
compressor_file_storage = SimpleLazyObject(
- lambda: get_storage_class('compressor.storage.CompressorFileStorage')())
+ lambda: get_storage_class("compressor.storage.CompressorFileStorage")()
+)
class GzipCompressorFileStorage(CompressorFileStorage):
"""
File system storage that stores gzipped files in addition to the usual files.
"""
+
def save(self, filename, content):
filename = super().save(filename, content)
orig_path = self.path(filename)
- compressed_path = '%s.gz' % orig_path
+ compressed_path = "%s.gz" % orig_path
- with open(orig_path, 'rb') as f_in, open(compressed_path, 'wb') as f_out:
- with gzip.GzipFile(fileobj=f_out, mode='wb') as gz_out:
+ with open(orig_path, "rb") as f_in, open(compressed_path, "wb") as f_out:
+ with gzip.GzipFile(fileobj=f_out, mode="wb") as gz_out:
gz_out.write(f_in.read())
# Ensure the file timestamps match.
@@ -78,17 +81,19 @@ class BrotliCompressorFileStorage(CompressorFileStorage):
"""
File system storage that stores brotli files in addition to the usual files.
"""
+
chunk_size = 1024
def save(self, filename, content):
filename = super().save(filename, content)
orig_path = self.path(filename)
- compressed_path = '%s.br' % orig_path
+ compressed_path = "%s.br" % orig_path
import brotli
+
br_compressor = brotli.Compressor()
- with open(orig_path, 'rb') as f_in, open(compressed_path, 'wb') as f_out:
- for f_in_data in iter(lambda: f_in.read(self.chunk_size), b''):
+ with open(orig_path, "rb") as f_in, open(compressed_path, "wb") as f_out:
+ for f_in_data in iter(lambda: f_in.read(self.chunk_size), b""):
compressed_data = br_compressor.process(f_in_data)
if not compressed_data:
compressed_data = br_compressor.flush()
@@ -116,7 +121,9 @@ default_storage = DefaultStorage()
class OfflineManifestFileStorage(CompressorFileStorage):
def __init__(self, location=None, base_url=None, *args, **kwargs):
if location is None:
- location = os.path.join(settings.COMPRESS_ROOT, settings.COMPRESS_OUTPUT_DIR)
+ location = os.path.join(
+ settings.COMPRESS_ROOT, settings.COMPRESS_OUTPUT_DIR
+ )
if base_url is None:
base_url = urljoin(settings.COMPRESS_URL, settings.COMPRESS_OUTPUT_DIR)
super().__init__(location, base_url, *args, **kwargs)
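Note: illustrative use of the gzip-aware storage defined above; it writes the saved file and a sibling `.gz` under COMPRESS_ROOT. The file name and content are made up, and a configured Django project is assumed.

from django.core.files.base import ContentFile

from compressor.storage import GzipCompressorFileStorage

storage = GzipCompressorFileStorage()
name = storage.save("CACHE/js/example.deadbeef.js", ContentFile(b"obj = {};"))
# Both storage.path(name) and storage.path(name) + ".gz" now exist,
# with matching timestamps.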
diff --git a/compressor/templatetags/compress.py b/compressor/templatetags/compress.py
index 8bd5500..57bce48 100644
--- a/compressor/templatetags/compress.py
+++ b/compressor/templatetags/compress.py
@@ -1,22 +1,26 @@
from django import template
from django.core.exceptions import ImproperlyConfigured
-from compressor.cache import (cache_get, cache_set, get_offline_hexdigest,
- get_offline_manifest, get_templatetag_cachekey)
+from compressor.cache import (
+ cache_get,
+ cache_set,
+ get_offline_hexdigest,
+ get_offline_manifest,
+ get_templatetag_cachekey,
+)
from compressor.conf import settings
from compressor.exceptions import OfflineGenerationError
from compressor.utils import get_class
register = template.Library()
-OUTPUT_FILE = 'file'
-OUTPUT_INLINE = 'inline'
-OUTPUT_PRELOAD = 'preload'
+OUTPUT_FILE = "file"
+OUTPUT_INLINE = "inline"
+OUTPUT_PRELOAD = "preload"
OUTPUT_MODES = (OUTPUT_FILE, OUTPUT_INLINE, OUTPUT_PRELOAD)
class CompressorMixin:
-
def get_original_content(self, context):
raise NotImplementedError
@@ -28,9 +32,9 @@ class CompressorMixin:
if kind not in self.compressors.keys():
raise template.TemplateSyntaxError(
"The compress tag's argument must be one of: %s."
- % ', '.join(map(repr, self.compressors.keys())))
- return get_class(self.compressors.get(kind),
- exception=ImproperlyConfigured)
+ % ", ".join(map(repr, self.compressors.keys()))
+ )
+ return get_class(self.compressors.get(kind), exception=ImproperlyConfigured)
def get_compressor(self, context, kind, log, verbosity):
cls = self.compressor_cls(kind)
@@ -39,13 +43,13 @@ class CompressorMixin:
content=self.get_original_content(context),
context=context,
log=log,
- verbosity=verbosity
+ verbosity=verbosity,
)
def debug_mode(self, context):
if settings.COMPRESS_DEBUG_TOGGLE:
# Only check for the debug parameter if a RequestContext was used
- request = context.get('request', None)
+ request = context.get("request", None)
if request is not None:
return settings.COMPRESS_DEBUG_TOGGLE in request.GET
@@ -57,8 +61,7 @@ class CompressorMixin:
but can be overridden to completely disable compression for
a subclass, for instance.
"""
- return (settings.COMPRESS_ENABLED
- and settings.COMPRESS_OFFLINE) or forced
+ return (settings.COMPRESS_ENABLED and settings.COMPRESS_OFFLINE) or forced
def render_offline(self, context):
"""
@@ -75,13 +78,15 @@ class CompressorMixin:
# a string-alike object to e.g. add ``SCRIPT_NAME`` WSGI param
# as a *path prefix* to the output URL.
# See https://code.djangoproject.com/ticket/25598.
- str(settings.COMPRESS_URL)
+ str(settings.COMPRESS_URL),
)
else:
- raise OfflineGenerationError('You have offline compression '
+ raise OfflineGenerationError(
+ "You have offline compression "
'enabled but key "%s" is missing from offline manifest. '
'You may need to run "python manage.py compress". Here '
- 'is the original content:\n\n%s' % (key, original_content))
+ "is the original content:\n\n%s" % (key, original_content)
+ )
def render_cached(self, compressor, kind, mode):
"""
@@ -92,19 +97,24 @@ class CompressorMixin:
cache_content = cache_get(cache_key)
return cache_key, cache_content
- def render_compressed(self, context, kind, mode, name=None, forced=False, log=None, verbosity=0):
+ def render_compressed(
+ self, context, kind, mode, name=None, forced=False, log=None, verbosity=0
+ ):
# See if it has been rendered offline
if self.is_offline_compression_enabled(forced) and not forced:
return self.render_offline(context)
# Take a shortcut if we really don't have anything to do
- if (not settings.COMPRESS_ENABLED
- and not settings.COMPRESS_PRECOMPILERS and not forced):
+ if (
+ not settings.COMPRESS_ENABLED
+ and not settings.COMPRESS_PRECOMPILERS
+ and not forced
+ ):
return self.get_original_content(context)
- name = name or getattr(self, 'name', None)
- context['compressed'] = {'name': name}
+ name = name or getattr(self, "name", None)
+ context["compressed"] = {"name": name}
compressor = self.get_compressor(context, kind, log, verbosity)
# Check cache
@@ -114,9 +124,9 @@ class CompressorMixin:
if cache_content is not None:
return cache_content
- file_basename = name or getattr(self, 'basename', None)
+ file_basename = name or getattr(self, "basename", None)
if file_basename is None:
- file_basename = 'output'
+ file_basename = "output"
rendered_output = compressor.output(mode, forced=forced, basename=file_basename)
assert isinstance(rendered_output, str)
@@ -126,7 +136,6 @@ class CompressorMixin:
class CompressorNode(CompressorMixin, template.Node):
-
def __init__(self, nodelist, kind=None, mode=OUTPUT_FILE, name=None):
self.nodelist = nodelist
self.kind = kind
@@ -148,7 +157,9 @@ class CompressorNode(CompressorMixin, template.Node):
except AttributeError:
log, verbosity = None, 0
- return self.render_compressed(context, self.kind, self.mode, forced=forced, log=log, verbosity=verbosity)
+ return self.render_compressed(
+ context, self.kind, self.mode, forced=forced, log=log, verbosity=verbosity
+ )
@register.tag
@@ -168,14 +179,15 @@ def compress(parser, token):
"""
- nodelist = parser.parse(('endcompress',))
+ nodelist = parser.parse(("endcompress",))
parser.delete_first_token()
args = token.split_contents()
if not len(args) in (2, 3, 4):
raise template.TemplateSyntaxError(
- "%r tag requires either one, two or three arguments." % args[0])
+ "%r tag requires either one, two or three arguments." % args[0]
+ )
kind = args[1]
@@ -183,8 +195,9 @@ def compress(parser, token):
mode = args[2]
if mode not in OUTPUT_MODES:
raise template.TemplateSyntaxError(
- "%r's second argument must be '%s' or '%s'." %
- (args[0], OUTPUT_FILE, OUTPUT_INLINE))
+ "%r's second argument must be '%s' or '%s'."
+ % (args[0], OUTPUT_FILE, OUTPUT_INLINE)
+ )
else:
mode = OUTPUT_FILE
if len(args) == 4:
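Note: a hedged sketch of the arguments the tag parsing above accepts (a kind, an optional mode of file/inline/preload, and an optional name). Rendering it for real requires a configured project with compressor installed; the paths are illustrative.

from django.template import engines

django_engine = engines["django"]
tmpl = django_engine.from_string(
    "{% load compress %}"
    "{% compress js inline %}"
    '<script src="/static/js/one.js"></script>'
    "{% endcompress %}"
)
# tmpl.render({}) would emit the compressed, inlined script block.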
diff --git a/compressor/test_settings.py b/compressor/test_settings.py
index facf66a..68b0152 100644
--- a/compressor/test_settings.py
+++ b/compressor/test_settings.py
@@ -1,63 +1,64 @@
import os
-TEST_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'tests')
+TEST_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), "tests")
CACHES = {
- 'default': {
- 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
- 'LOCATION': 'unique-snowflake'
+ "default": {
+ "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
+ "LOCATION": "unique-snowflake",
}
}
DATABASES = {
- 'default': {
- 'ENGINE': 'django.db.backends.sqlite3',
- 'NAME': ':memory:',
+ "default": {
+ "ENGINE": "django.db.backends.sqlite3",
+ "NAME": ":memory:",
}
}
INSTALLED_APPS = [
- 'django.contrib.staticfiles',
- 'compressor',
- 'sekizai',
+ "django.contrib.staticfiles",
+ "compressor",
+ "sekizai",
]
STATICFILES_FINDERS = [
- 'django.contrib.staticfiles.finders.FileSystemFinder',
- 'django.contrib.staticfiles.finders.AppDirectoriesFinder',
- 'compressor.finders.CompressorFinder',
+ "django.contrib.staticfiles.finders.FileSystemFinder",
+ "django.contrib.staticfiles.finders.AppDirectoriesFinder",
+ "compressor.finders.CompressorFinder",
]
-STATIC_URL = '/static/'
-
-
-STATIC_ROOT = os.path.join(TEST_DIR, 'static')
-
-TEMPLATES = [{
- 'BACKEND': 'django.template.backends.django.DjangoTemplates',
- 'APP_DIRS': True,
- 'DIRS': [
- # Specifically choose a name that will not be considered
- # by app_directories loader, to make sure each test uses
- # a specific template without considering the others.
- os.path.join(TEST_DIR, 'test_templates'),
- ],
-}, {
- 'BACKEND': 'django.template.backends.jinja2.Jinja2',
- 'APP_DIRS': True,
- 'DIRS': [
- # Specifically choose a name that will not be considered
- # by app_directories loader, to make sure each test uses
- # a specific template without considering the others.
- os.path.join(TEST_DIR, 'test_templates_jinja2'),
- ],
-}]
+STATIC_URL = "/static/"
+
+
+STATIC_ROOT = os.path.join(TEST_DIR, "static")
+
+TEMPLATES = [
+ {
+ "BACKEND": "django.template.backends.django.DjangoTemplates",
+ "APP_DIRS": True,
+ "DIRS": [
+ # Specifically choose a name that will not be considered
+ # by app_directories loader, to make sure each test uses
+ # a specific template without considering the others.
+ os.path.join(TEST_DIR, "test_templates"),
+ ],
+ },
+ {
+ "BACKEND": "django.template.backends.jinja2.Jinja2",
+ "APP_DIRS": True,
+ "DIRS": [
+ # Specifically choose a name that will not be considered
+ # by app_directories loader, to make sure each test uses
+ # a specific template without considering the others.
+ os.path.join(TEST_DIR, "test_templates_jinja2"),
+ ],
+ },
+]
SECRET_KEY = "iufoj=mibkpdz*%bob952x(%49rqgv8gg45k36kjcg76&-y5=!"
-PASSWORD_HASHERS = (
- 'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
-)
+PASSWORD_HASHERS = ("django.contrib.auth.hashers.UnsaltedMD5PasswordHasher",)
MIDDLEWARE_CLASSES = []
diff --git a/compressor/tests/precompiler.py b/compressor/tests/precompiler.py
index bf82593..5c66d41 100644
--- a/compressor/tests/precompiler.py
+++ b/compressor/tests/precompiler.py
@@ -5,12 +5,24 @@ import sys
def main():
p = optparse.OptionParser()
- p.add_option('-f', '--file', action="store",
- type="string", dest="filename",
- help="File to read from, defaults to stdin", default=None)
- p.add_option('-o', '--output', action="store",
- type="string", dest="outfile",
- help="File to write to, defaults to stdout", default=None)
+ p.add_option(
+ "-f",
+ "--file",
+ action="store",
+ type="string",
+ dest="filename",
+ help="File to read from, defaults to stdin",
+ default=None,
+ )
+ p.add_option(
+ "-o",
+ "--output",
+ action="store",
+ type="string",
+ dest="outfile",
+ help="File to write to, defaults to stdout",
+ default=None,
+ )
options, arguments = p.parse_args()
@@ -20,14 +32,14 @@ def main():
else:
content = sys.stdin.read()
- content = content.replace('background:', 'color:')
+ content = content.replace("background:", "color:")
if options.outfile:
- with open(options.outfile, 'w') as f:
+ with open(options.outfile, "w") as f:
f.write(content)
else:
print(content)
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/compressor/tests/test_base.py b/compressor/tests/test_base.py
index 0ff1d9a..24280e1 100644
--- a/compressor/tests/test_base.py
+++ b/compressor/tests/test_base.py
@@ -25,25 +25,25 @@ def make_soup(markup):
def css_tag(href, **kwargs):
- rendered_attrs = ''.join([' %s="%s"' % (k, v) for k, v in kwargs.items()])
+ rendered_attrs = "".join([' %s="%s"' % (k, v) for k, v in kwargs.items()])
template = '<link rel="stylesheet" href="%s" type="text/css"%s>'
return template % (href, rendered_attrs)
class TestPrecompiler:
- """A filter whose output is always the string 'OUTPUT' """
- def __init__(self, content, attrs, filter_type=None, filename=None,
- charset=None):
+ """A filter whose output is always the string 'OUTPUT'"""
+
+ def __init__(self, content, attrs, filter_type=None, filename=None, charset=None):
pass
def input(self, **kwargs):
- return 'OUTPUT'
+ return "OUTPUT"
class PassthroughPrecompiler:
- """A filter whose outputs the input unmodified """
- def __init__(self, content, attrs, filter_type=None, filename=None,
- charset=None):
+    """A filter that outputs the input unmodified"""
+
+ def __init__(self, content, attrs, filter_type=None, filename=None, charset=None):
self.content = content
def input(self, **kwargs):
@@ -54,23 +54,36 @@ test_dir = os.path.abspath(os.path.join(os.path.dirname(__file__)))
class PrecompilerAndAbsoluteFilterTestCase(SimpleTestCase):
-
def setUp(self):
self.html_orig = '<link rel="stylesheet" href="/static/css/relative_url.css" type="text/css" />'
self.html_auto_close_removed = '<link rel="stylesheet" href="/static/css/relative_url.css" type="text/css">'
self.html_link_to_precompiled_css = '<link rel="stylesheet" href="/static/CACHE/css/relative_url.e8602322bfa6.css" type="text/css">'
self.html_link_to_absolutized_css = '<link rel="stylesheet" href="/static/CACHE/css/relative_url.376db5682982.css" type="text/css">'
- self.css_orig = "p { background: url('../img/python.png'); }" # content of relative_url.css
- self.css_absolutized = "p { background: url('/static/img/python.png?ccb38978f900'); }"
+ self.css_orig = (
+ "p { background: url('../img/python.png'); }" # content of relative_url.css
+ )
+ self.css_absolutized = (
+ "p { background: url('/static/img/python.png?ccb38978f900'); }"
+ )
def helper(self, enabled, use_precompiler, use_absolute_filter, expected_output):
- precompiler = (('text/css', 'compressor.tests.test_base.PassthroughPrecompiler'),) if use_precompiler else ()
- filters = ('compressor.filters.css_default.CssAbsoluteFilter',) if use_absolute_filter else ()
+ precompiler = (
+ (("text/css", "compressor.tests.test_base.PassthroughPrecompiler"),)
+ if use_precompiler
+ else ()
+ )
+ filters = (
+ ("compressor.filters.css_default.CssAbsoluteFilter",)
+ if use_absolute_filter
+ else ()
+ )
- with self.settings(COMPRESS_ENABLED=enabled,
- COMPRESS_PRECOMPILERS=precompiler,
- COMPRESS_FILTERS={'css': filters}):
- css_node = CssCompressor('css', self.html_orig)
+ with self.settings(
+ COMPRESS_ENABLED=enabled,
+ COMPRESS_PRECOMPILERS=precompiler,
+ COMPRESS_FILTERS={"css": filters},
+ ):
+ css_node = CssCompressor("css", self.html_orig)
output = list(css_node.hunks())[0]
self.assertEqual(output, expected_output)
@@ -82,42 +95,83 @@ class PrecompilerAndAbsoluteFilterTestCase(SimpleTestCase):
in the filters setting.
While at it, ensure that everything runs as expected when compression is enabled.
"""
- self.helper(enabled=False, use_precompiler=False, use_absolute_filter=False, expected_output=self.html_auto_close_removed)
- self.helper(enabled=False, use_precompiler=False, use_absolute_filter=True, expected_output=self.html_auto_close_removed)
- self.helper(enabled=False, use_precompiler=True, use_absolute_filter=False, expected_output=self.html_link_to_precompiled_css)
- self.helper(enabled=False, use_precompiler=True, use_absolute_filter=True, expected_output=self.html_link_to_absolutized_css)
- self.helper(enabled=True, use_precompiler=False, use_absolute_filter=False, expected_output=self.css_orig)
- self.helper(enabled=True, use_precompiler=False, use_absolute_filter=True, expected_output=self.css_absolutized)
- self.helper(enabled=True, use_precompiler=True, use_absolute_filter=False, expected_output=self.css_orig)
- self.helper(enabled=True, use_precompiler=True, use_absolute_filter=True, expected_output=self.css_absolutized)
+ self.helper(
+ enabled=False,
+ use_precompiler=False,
+ use_absolute_filter=False,
+ expected_output=self.html_auto_close_removed,
+ )
+ self.helper(
+ enabled=False,
+ use_precompiler=False,
+ use_absolute_filter=True,
+ expected_output=self.html_auto_close_removed,
+ )
+ self.helper(
+ enabled=False,
+ use_precompiler=True,
+ use_absolute_filter=False,
+ expected_output=self.html_link_to_precompiled_css,
+ )
+ self.helper(
+ enabled=False,
+ use_precompiler=True,
+ use_absolute_filter=True,
+ expected_output=self.html_link_to_absolutized_css,
+ )
+ self.helper(
+ enabled=True,
+ use_precompiler=False,
+ use_absolute_filter=False,
+ expected_output=self.css_orig,
+ )
+ self.helper(
+ enabled=True,
+ use_precompiler=False,
+ use_absolute_filter=True,
+ expected_output=self.css_absolutized,
+ )
+ self.helper(
+ enabled=True,
+ use_precompiler=True,
+ use_absolute_filter=False,
+ expected_output=self.css_orig,
+ )
+ self.helper(
+ enabled=True,
+ use_precompiler=True,
+ use_absolute_filter=True,
+ expected_output=self.css_absolutized,
+ )
@override_settings(
COMPRESS_ENABLED=True,
COMPRESS_PRECOMPILERS=(),
- COMPRESS_DEBUG_TOGGLE='nocompress',
+ COMPRESS_DEBUG_TOGGLE="nocompress",
)
class CompressorTestCase(SimpleTestCase):
-
def setUp(self):
self.css = """\
<link rel="stylesheet" href="/static/css/one.css" type="text/css">
<style type="text/css">p { border:5px solid green;}</style>
<link rel="stylesheet" href="/static/css/two.css" type="text/css">"""
- self.css_node = CssCompressor('css', self.css)
+ self.css_node = CssCompressor("css", self.css)
self.js = """\
<script src="/static/js/one.js" type="text/javascript"></script>
<script type="text/javascript">obj.value = "value";</script>"""
- self.js_node = JsCompressor('js', self.js)
+ self.js_node = JsCompressor("js", self.js)
def assertEqualCollapsed(self, a, b):
"""
assertEqual with internal newlines collapsed to single, and
trailing whitespace removed.
"""
+
def collapse(s):
- return re.sub(r'\n+', '\n', s).rstrip()
+ return re.sub(r"\n+", "\n", s).rstrip()
+
self.assertEqual(collapse(a), collapse(b))
def assertEqualSplits(self, a, b):
@@ -125,27 +179,30 @@ class CompressorTestCase(SimpleTestCase):
assertEqual for splits, particularly ignoring the presence of
a trailing newline on the content.
"""
+
def mangle(split):
return [(x[0], x[1], x[2], x[3].rstrip()) for x in split]
+
self.assertEqual(mangle(a), mangle(b))
def test_css_split(self):
out = [
(
SOURCE_FILE,
- os.path.join(settings.COMPRESS_ROOT, 'css', 'one.css'),
- 'css/one.css', '<link rel="stylesheet" href="/static/css/one.css" type="text/css">',
+ os.path.join(settings.COMPRESS_ROOT, "css", "one.css"),
+ "css/one.css",
+ '<link rel="stylesheet" href="/static/css/one.css" type="text/css">',
),
(
SOURCE_HUNK,
- 'p { border:5px solid green;}',
+ "p { border:5px solid green;}",
None,
'<style type="text/css">p { border:5px solid green;}</style>',
),
(
SOURCE_FILE,
- os.path.join(settings.COMPRESS_ROOT, 'css', 'two.css'),
- 'css/two.css',
+ os.path.join(settings.COMPRESS_ROOT, "css", "two.css"),
+ "css/two.css",
'<link rel="stylesheet" href="/static/css/two.css" type="text/css">',
),
]
@@ -154,39 +211,48 @@ class CompressorTestCase(SimpleTestCase):
self.assertEqualSplits(split, out)
def test_css_hunks(self):
- out = ['body { background:#990; }', 'p { border:5px solid green;}', 'body { color:#fff; }']
+ out = [
+ "body { background:#990; }",
+ "p { border:5px solid green;}",
+ "body { color:#fff; }",
+ ]
self.assertEqual(out, list(self.css_node.hunks()))
def test_css_output(self):
- out = 'body { background:#990; }\np { border:5px solid green;}\nbody { color:#fff; }'
- hunks = '\n'.join([h for h in self.css_node.hunks()])
+ out = "body { background:#990; }\np { border:5px solid green;}\nbody { color:#fff; }"
+ hunks = "\n".join([h for h in self.css_node.hunks()])
self.assertEqual(out, hunks)
def test_css_output_with_bom_input(self):
- out = 'body { background:#990; }\n.compress-test {color: red;}'
- css = ("""<link rel="stylesheet" href="/static/css/one.css" type="text/css" />
- <link rel="stylesheet" href="/static/css/utf-8_with-BOM.css" type="text/css" />""")
- css_node_with_bom = CssCompressor('css', css)
- hunks = '\n'.join([h for h in css_node_with_bom.hunks()])
+ out = "body { background:#990; }\n.compress-test {color: red;}"
+ css = """<link rel="stylesheet" href="/static/css/one.css" type="text/css" />
+ <link rel="stylesheet" href="/static/css/utf-8_with-BOM.css" type="text/css" />"""
+ css_node_with_bom = CssCompressor("css", css)
+ hunks = "\n".join([h for h in css_node_with_bom.hunks()])
self.assertEqual(out, hunks)
def test_css_mtimes(self):
- is_date = re.compile(r'^\d{10}[\.\d]+$')
+ is_date = re.compile(r"^\d{10}[\.\d]+$")
for date in self.css_node.mtimes:
- self.assertTrue(is_date.match(str(float(date))),
- "mtimes is returning something that doesn't look like a date: %s" % date)
+ self.assertTrue(
+ is_date.match(str(float(date))),
+ "mtimes is returning something that doesn't look like a date: %s"
+ % date,
+ )
@override_settings(COMPRESS_ENABLED=False)
def test_css_return_if_off(self):
self.assertEqualCollapsed(self.css, self.css_node.output())
def test_cachekey(self):
- is_cachekey = re.compile(r'\w{12}')
- self.assertTrue(is_cachekey.match(self.css_node.cachekey),
- r"cachekey is returning something that doesn't look like r'\w{12}'")
+ is_cachekey = re.compile(r"\w{12}")
+ self.assertTrue(
+ is_cachekey.match(self.css_node.cachekey),
+ r"cachekey is returning something that doesn't look like r'\w{12}'",
+ )
def test_css_return_if_on(self):
- output = css_tag('/static/CACHE/css/600674ea1d3d.css')
+ output = css_tag("/static/CACHE/css/600674ea1d3d.css")
self.assertEqual(output, self.css_node.output().strip())
def test_css_preload_output(self):
@@ -198,8 +264,8 @@ class CompressorTestCase(SimpleTestCase):
out = [
(
SOURCE_FILE,
- os.path.join(settings.COMPRESS_ROOT, 'js', 'one.js'),
- 'js/one.js',
+ os.path.join(settings.COMPRESS_ROOT, "js", "one.js"),
+ "js/one.js",
'<script src="/static/js/one.js" type="text/javascript"></script>',
),
(
@@ -214,7 +280,7 @@ class CompressorTestCase(SimpleTestCase):
self.assertEqualSplits(split, out)
def test_js_hunks(self):
- out = ['obj = {};', 'obj.value = "value";']
+ out = ["obj = {};", 'obj.value = "value";']
self.assertEqual(out, list(self.js_node.hunks()))
def test_js_output(self):
@@ -223,17 +289,19 @@ class CompressorTestCase(SimpleTestCase):
def test_js_preload_output(self):
# this needs to have the same hash as in the test above
- out = '<link rel="preload" href="/static/CACHE/js/8a0fed36c317.js" as="script" />'
+ out = (
+ '<link rel="preload" href="/static/CACHE/js/8a0fed36c317.js" as="script" />'
+ )
self.assertEqual(out, self.js_node.output(mode="preload"))
def test_js_override_url(self):
- self.js_node.context.update({'url': 'This is not a url, just a text'})
+ self.js_node.context.update({"url": "This is not a url, just a text"})
out = '<script src="/static/CACHE/js/8a0fed36c317.js"></script>'
self.assertEqual(out, self.js_node.output())
def test_css_override_url(self):
- self.css_node.context.update({'url': 'This is not a url, just a text'})
- output = css_tag('/static/CACHE/css/600674ea1d3d.css')
+ self.css_node.context.update({"url": "This is not a url, just a text"})
+ output = css_tag("/static/CACHE/css/600674ea1d3d.css")
self.assertEqual(output, self.css_node.output().strip())
@override_settings(COMPRESS_PRECOMPILERS=(), COMPRESS_ENABLED=False)
@@ -244,54 +312,64 @@ class CompressorTestCase(SimpleTestCase):
output = '<script src="/static/CACHE/js/8a0fed36c317.js"></script>'
self.assertEqual(output, self.js_node.output())
- @override_settings(COMPRESS_OUTPUT_DIR='custom')
+ @override_settings(COMPRESS_OUTPUT_DIR="custom")
def test_custom_output_dir1(self):
output = '<script src="/static/custom/js/8a0fed36c317.js"></script>'
- self.assertEqual(output, JsCompressor('js', self.js).output())
+ self.assertEqual(output, JsCompressor("js", self.js).output())
- @override_settings(COMPRESS_OUTPUT_DIR='')
+ @override_settings(COMPRESS_OUTPUT_DIR="")
def test_custom_output_dir2(self):
output = '<script src="/static/js/8a0fed36c317.js"></script>'
- self.assertEqual(output, JsCompressor('js', self.js).output())
+ self.assertEqual(output, JsCompressor("js", self.js).output())
- @override_settings(COMPRESS_OUTPUT_DIR='/custom/nested/')
+ @override_settings(COMPRESS_OUTPUT_DIR="/custom/nested/")
def test_custom_output_dir3(self):
output = '<script src="/static/custom/nested/js/8a0fed36c317.js"></script>'
- self.assertEqual(output, JsCompressor('js', self.js).output())
-
- @override_settings(COMPRESS_PRECOMPILERS=(
- ('text/foobar', 'compressor.tests.test_base.TestPrecompiler'),
- ), COMPRESS_ENABLED=True)
+ self.assertEqual(output, JsCompressor("js", self.js).output())
+
+ @override_settings(
+ COMPRESS_PRECOMPILERS=(
+ ("text/foobar", "compressor.tests.test_base.TestPrecompiler"),
+ ),
+ COMPRESS_ENABLED=True,
+ )
def test_precompiler_class_used(self):
css = '<style type="text/foobar">p { border:10px solid red;}</style>'
- css_node = CssCompressor('css', css)
- output = make_soup(css_node.output('inline'))
- self.assertEqual(output.style.contents[0], 'OUTPUT')
-
- @override_settings(COMPRESS_PRECOMPILERS=(
- ('text/foobar', 'compressor.tests.test_base.NonexistentFilter'),
- ), COMPRESS_ENABLED=True)
+ css_node = CssCompressor("css", css)
+ output = make_soup(css_node.output("inline"))
+ self.assertEqual(output.style.contents[0], "OUTPUT")
+
+ @override_settings(
+ COMPRESS_PRECOMPILERS=(
+ ("text/foobar", "compressor.tests.test_base.NonexistentFilter"),
+ ),
+ COMPRESS_ENABLED=True,
+ )
def test_nonexistent_precompiler_class_error(self):
css = '<style type="text/foobar">p { border:10px solid red;}</style>'
- css_node = CssCompressor('css', css)
- self.assertRaises(FilterDoesNotExist, css_node.output, 'inline')
+ css_node = CssCompressor("css", css)
+ self.assertRaises(FilterDoesNotExist, css_node.output, "inline")
- @override_settings(COMPRESS_PRECOMPILERS=(
- ('text/foobar', './foo -I ./bar/baz'),
- ), COMPRESS_ENABLED=True)
+ @override_settings(
+ COMPRESS_PRECOMPILERS=(("text/foobar", "./foo -I ./bar/baz"),),
+ COMPRESS_ENABLED=True,
+ )
def test_command_with_dot_precompiler(self):
css = '<style type="text/foobar">p { border:10px solid red;}</style>'
- css_node = CssCompressor('css', css)
- self.assertRaises(FilterError, css_node.output, 'inline')
-
- @override_settings(COMPRESS_PRECOMPILERS=(
- ('text/django', 'compressor.filters.template.TemplateFilter'),
- ), COMPRESS_ENABLED=True)
+ css_node = CssCompressor("css", css)
+ self.assertRaises(FilterError, css_node.output, "inline")
+
+ @override_settings(
+ COMPRESS_PRECOMPILERS=(
+ ("text/django", "compressor.filters.template.TemplateFilter"),
+ ),
+ COMPRESS_ENABLED=True,
+ )
def test_template_precompiler(self):
css = '<style type="text/django">p { border:10px solid {% if 1 %}green{% else %}red{% endif %};}</style>'
- css_node = CssCompressor('css', css)
- output = make_soup(css_node.output('inline'))
- self.assertEqual(output.style.contents[0], 'p{border:10px solid green}')
+ css_node = CssCompressor("css", css)
+ output = make_soup(css_node.output("inline"))
+ self.assertEqual(output.style.contents[0], "p{border:10px solid green}")
class CssMediaTestCase(SimpleTestCase):
@@ -303,33 +381,46 @@ class CssMediaTestCase(SimpleTestCase):
<style type="text/css">h1 { border:5px solid green;}</style>"""
def test_css_output(self):
- css_node = CssCompressor('css', self.css)
- links = make_soup(css_node.output()).find_all('link')
- media = ['screen', 'print', 'all', None]
+ css_node = CssCompressor("css", self.css)
+ links = make_soup(css_node.output()).find_all("link")
+ media = ["screen", "print", "all", None]
self.assertEqual(len(links), 4)
- self.assertEqual(media, [link.get('media', None) for link in links])
+ self.assertEqual(media, [link.get("media", None) for link in links])
def test_avoid_reordering_css(self):
- css = self.css + '<style type="text/css" media="print">p { border:10px solid red;}</style>'
- css_node = CssCompressor('css', css)
- media = ['screen', 'print', 'all', None, 'print']
- links = make_soup(css_node.output()).find_all('link')
- self.assertEqual(media, [link.get('media', None) for link in links])
-
- @override_settings(COMPRESS_PRECOMPILERS=(
- ('text/foobar', '%s %s {infile} {outfile}' % (sys.executable, os.path.join(test_dir, 'precompiler.py'))),
- ), COMPRESS_ENABLED=False)
+ css = (
+ self.css
+ + '<style type="text/css" media="print">p { border:10px solid red;}</style>'
+ )
+ css_node = CssCompressor("css", css)
+ media = ["screen", "print", "all", None, "print"]
+ links = make_soup(css_node.output()).find_all("link")
+ self.assertEqual(media, [link.get("media", None) for link in links])
+
+ @override_settings(
+ COMPRESS_PRECOMPILERS=(
+ (
+ "text/foobar",
+ "%s %s {infile} {outfile}"
+ % (sys.executable, os.path.join(test_dir, "precompiler.py")),
+ ),
+ ),
+ COMPRESS_ENABLED=False,
+ )
def test_passthough_when_compress_disabled(self):
css = """\
<link rel="stylesheet" href="/static/css/one.css" type="text/css" media="screen">
<link rel="stylesheet" href="/static/css/two.css" type="text/css" media="screen">
<style type="text/foobar" media="screen">h1 { border:5px solid green;}</style>"""
- css_node = CssCompressor('css', css)
- output = make_soup(css_node.output()).find_all(['link', 'style'])
- self.assertEqual(['/static/css/one.css', '/static/css/two.css', None],
- [link.get('href', None) for link in output])
- self.assertEqual(['screen', 'screen', 'screen'],
- [link.get('media', None) for link in output])
+ css_node = CssCompressor("css", css)
+ output = make_soup(css_node.output()).find_all(["link", "style"])
+ self.assertEqual(
+ ["/static/css/one.css", "/static/css/two.css", None],
+ [link.get("href", None) for link in output],
+ )
+ self.assertEqual(
+ ["screen", "screen", "screen"], [link.get("media", None) for link in output]
+ )
@override_settings(COMPRESS_VERBOSE=True)
@@ -338,9 +429,9 @@ class VerboseTestCase(CompressorTestCase):
class CacheBackendTestCase(CompressorTestCase):
-
def test_correct_backend(self):
from compressor.cache import cache
+
self.assertEqual(cache.__class__, locmem.LocMemCache)
@@ -357,13 +448,14 @@ class JsAsyncDeferTestCase(SimpleTestCase):
def test_js_output(self):
def extract_attr(tag):
- if tag.has_attr('async'):
- return 'async'
- if tag.has_attr('defer'):
- return 'defer'
- js_node = JsCompressor('js', self.js)
- output = [None, 'async', 'defer', None, 'async', None]
- scripts = make_soup(js_node.output()).find_all('script')
+ if tag.has_attr("async"):
+ return "async"
+ if tag.has_attr("defer"):
+ return "defer"
+
+ js_node = JsCompressor("js", self.js)
+ output = [None, "async", "defer", None, "async", None]
+ scripts = make_soup(js_node.output()).find_all("script")
attrs = [extract_attr(s) for s in scripts]
self.assertEqual(output, attrs)
@@ -376,26 +468,25 @@ class JSWithParensTestCase(SimpleTestCase):
"""
def test_js_content(self):
- js_node = JsCompressor('js', self.js)
+ js_node = JsCompressor("js", self.js)
content = js_node.filter_input()
- self.assertEqual(content[0], 'obj = {};;')
- self.assertEqual(content[1], 'pollos = {};')
+ self.assertEqual(content[0], "obj = {};;")
+ self.assertEqual(content[1], "pollos = {};")
class CacheTestCase(SimpleTestCase):
-
def setUp(self):
cachemod._cachekey_func = None
def test_get_cachekey_basic(self):
self.assertEqual(get_cachekey("foo"), "django_compressor.foo")
- @override_settings(COMPRESS_CACHE_KEY_FUNCTION='.leading.dot')
+ @override_settings(COMPRESS_CACHE_KEY_FUNCTION=".leading.dot")
def test_get_cachekey_leading_dot(self):
self.assertRaises(ImportError, lambda: get_cachekey("foo"))
- @override_settings(COMPRESS_CACHE_KEY_FUNCTION='invalid.module')
+ @override_settings(COMPRESS_CACHE_KEY_FUNCTION="invalid.module")
def test_get_cachekey_invalid_mod(self):
self.assertRaises(ImportError, lambda: get_cachekey("foo"))
@@ -407,10 +498,11 @@ class CacheTestCase(SimpleTestCase):
class CompressorInDebugModeTestCase(SimpleTestCase):
-
def setUp(self):
- self.css = '<link rel="stylesheet" href="/static/css/one.css" type="text/css" />'
- self.expected_css_hash = '5c6a60375256'
+ self.css = (
+ '<link rel="stylesheet" href="/static/css/one.css" type="text/css" />'
+ )
+ self.expected_css_hash = "5c6a60375256"
self.tmpdir = mkdtemp()
new_static_root = os.path.join(self.tmpdir, "static")
copytree(settings.STATIC_ROOT, new_static_root)
@@ -418,11 +510,11 @@ class CompressorInDebugModeTestCase(SimpleTestCase):
self.override_settings = self.settings(
COMPRESS_ENABLED=True,
COMPRESS_PRECOMPILERS=(),
- COMPRESS_DEBUG_TOGGLE='nocompress',
+ COMPRESS_DEBUG_TOGGLE="nocompress",
DEBUG=True,
STATIC_ROOT=new_static_root,
COMPRESS_ROOT=new_static_root,
- STATICFILES_DIRS=[settings.COMPRESS_ROOT]
+ STATICFILES_DIRS=[settings.COMPRESS_ROOT],
)
self.override_settings.__enter__()
@@ -436,8 +528,11 @@ class CompressorInDebugModeTestCase(SimpleTestCase):
# files can be outdated. So compressor's output shouldn't change from
# the one pre-generated if we modify the file in STATIC_ROOT.
def compare():
- expected = '<link rel="stylesheet" href="/static/CACHE/css/%s.css" type="text/css">' % self.expected_css_hash
- compressor = CssCompressor('css', self.css)
+ expected = (
+ '<link rel="stylesheet" href="/static/CACHE/css/%s.css" type="text/css">'
+ % self.expected_css_hash
+ )
+ compressor = CssCompressor("css", self.css)
compressor.storage = DefaultStorage()
output = compressor.output()
self.assertEqual(expected, output)
@@ -452,8 +547,8 @@ class CompressorInDebugModeTestCase(SimpleTestCase):
compare()
result_filename = os.path.join(
- settings.COMPRESS_ROOT, "CACHE", "css",
- "%s.css" % self.expected_css_hash)
+ settings.COMPRESS_ROOT, "CACHE", "css", "%s.css" % self.expected_css_hash
+ )
with open(result_filename, "r") as f:
result = f.read()
self.assertTrue(test_css_content not in result)
diff --git a/compressor/tests/test_conf.py b/compressor/tests/test_conf.py
index 8de1e33..de336f5 100644
--- a/compressor/tests/test_conf.py
+++ b/compressor/tests/test_conf.py
@@ -5,10 +5,10 @@ from compressor.conf import CompressorConf
default_css_filters = [
- 'compressor.filters.css_default.CssAbsoluteFilter',
- 'compressor.filters.cssmin.rCSSMinFilter'
+ "compressor.filters.css_default.CssAbsoluteFilter",
+ "compressor.filters.cssmin.rCSSMinFilter",
]
-default_js_filters = ['compressor.filters.jsmin.rJSMinFilter']
+default_js_filters = ["compressor.filters.jsmin.rJSMinFilter"]
def create_conf(**attrs):
@@ -16,22 +16,19 @@ def create_conf(**attrs):
# its configuration to be resolved.
# We use this to force the CompressorConf to be re-resolved,
# when we've changed the settings.
- attrs['__module__'] = None
- return type(
- 'TestCompressorConf',
- (CompressorConf, ),
- attrs)
+ attrs["__module__"] = None
+ return type("TestCompressorConf", (CompressorConf,), attrs)
class ConfTestCase(SimpleTestCase):
def test_filter_defaults(self):
        # This uses the settings from compressor/test_settings.py
        # which contains no values for filters and therefore uses the defaults.
- self.assertEqual(settings.COMPRESS_FILTERS['css'], default_css_filters)
- self.assertEqual(settings.COMPRESS_FILTERS['js'], default_js_filters)
+ self.assertEqual(settings.COMPRESS_FILTERS["css"], default_css_filters)
+ self.assertEqual(settings.COMPRESS_FILTERS["js"], default_js_filters)
- @override_settings(COMPRESS_FILTERS=dict(css=['ham'], js=['spam']))
+ @override_settings(COMPRESS_FILTERS=dict(css=["ham"], js=["spam"]))
def test_filters_by_main_setting(self):
conf = create_conf()
- self.assertEqual(conf.FILTERS['css'], ['ham'])
- self.assertEqual(conf.FILTERS['js'], ['spam'])
+ self.assertEqual(conf.FILTERS["css"], ["ham"])
+ self.assertEqual(conf.FILTERS["js"], ["spam"])
diff --git a/compressor/tests/test_filters.py b/compressor/tests/test_filters.py
index ddcfab0..e93b073 100644
--- a/compressor/tests/test_filters.py
+++ b/compressor/tests/test_filters.py
@@ -23,84 +23,116 @@ from compressor.tests.test_base import test_dir
def blankdict(*args, **kwargs):
- return defaultdict(lambda: '', *args, **kwargs)
+ return defaultdict(lambda: "", *args, **kwargs)
-@override_settings(COMPRESS_CACHEABLE_PRECOMPILERS=('text/css',))
+@override_settings(COMPRESS_CACHEABLE_PRECOMPILERS=("text/css",))
class PrecompilerTestCase(TestCase):
- CHARSET = 'utf-8'
+ CHARSET = "utf-8"
def setUp(self):
- self.test_precompiler = os.path.join(test_dir, 'precompiler.py')
+ self.test_precompiler = os.path.join(test_dir, "precompiler.py")
self.setup_infile()
self.cached_precompiler_args = dict(
- content=self.content, charset=self.CHARSET,
- filename=self.filename, mimetype='text/css')
+ content=self.content,
+ charset=self.CHARSET,
+ filename=self.filename,
+ mimetype="text/css",
+ )
- def setup_infile(self, filename='static/css/one.css'):
+ def setup_infile(self, filename="static/css/one.css"):
self.filename = os.path.join(test_dir, filename)
with io.open(self.filename, encoding=self.CHARSET) as file:
self.content = file.read()
def test_precompiler_dict_options(self):
command = "%s %s {option}" % (sys.executable, self.test_precompiler)
- option = ("option", "option",)
+ option = (
+ "option",
+ "option",
+ )
CompilerFilter.options = dict([option])
compiler = CompilerFilter(
- content=self.content, filename=self.filename,
- charset=self.CHARSET, command=command)
+ content=self.content,
+ filename=self.filename,
+ charset=self.CHARSET,
+ command=command,
+ )
self.assertIn(option, compiler.options)
def test_precompiler_infile_outfile(self):
- command = '%s %s -f {infile} -o {outfile}' % (sys.executable, self.test_precompiler)
+ command = "%s %s -f {infile} -o {outfile}" % (
+ sys.executable,
+ self.test_precompiler,
+ )
compiler = CompilerFilter(
- content=self.content, filename=self.filename,
- charset=self.CHARSET, command=command)
+ content=self.content,
+ filename=self.filename,
+ charset=self.CHARSET,
+ command=command,
+ )
self.assertEqual("body { color:#990; }", compiler.input())
def test_precompiler_infile_with_spaces(self):
- self.setup_infile('static/css/filename with spaces.css')
- command = '%s %s -f {infile} -o {outfile}' % (sys.executable, self.test_precompiler)
+ self.setup_infile("static/css/filename with spaces.css")
+ command = "%s %s -f {infile} -o {outfile}" % (
+ sys.executable,
+ self.test_precompiler,
+ )
compiler = CompilerFilter(
- content=self.content, filename=self.filename,
- charset=self.CHARSET, command=command)
+ content=self.content,
+ filename=self.filename,
+ charset=self.CHARSET,
+ command=command,
+ )
self.assertEqual("body { color:#424242; }", compiler.input())
def test_precompiler_infile_stdout(self):
- command = '%s %s -f {infile}' % (sys.executable, self.test_precompiler)
+ command = "%s %s -f {infile}" % (sys.executable, self.test_precompiler)
compiler = CompilerFilter(
- content=self.content, filename=None, charset=None, command=command)
+ content=self.content, filename=None, charset=None, command=command
+ )
self.assertEqual("body { color:#990; }%s" % os.linesep, compiler.input())
def test_precompiler_stdin_outfile(self):
- command = '%s %s -o {outfile}' % (sys.executable, self.test_precompiler)
+ command = "%s %s -o {outfile}" % (sys.executable, self.test_precompiler)
compiler = CompilerFilter(
- content=self.content, filename=None, charset=None, command=command)
+ content=self.content, filename=None, charset=None, command=command
+ )
self.assertEqual("body { color:#990; }", compiler.input())
def test_precompiler_stdin_stdout(self):
- command = '%s %s' % (sys.executable, self.test_precompiler)
+ command = "%s %s" % (sys.executable, self.test_precompiler)
compiler = CompilerFilter(
- content=self.content, filename=None, charset=None, command=command)
+ content=self.content, filename=None, charset=None, command=command
+ )
self.assertEqual("body { color:#990; }%s" % os.linesep, compiler.input())
def test_precompiler_stdin_stdout_filename(self):
- command = '%s %s' % (sys.executable, self.test_precompiler)
+ command = "%s %s" % (sys.executable, self.test_precompiler)
compiler = CompilerFilter(
- content=self.content, filename=self.filename,
- charset=self.CHARSET, command=command)
+ content=self.content,
+ filename=self.filename,
+ charset=self.CHARSET,
+ command=command,
+ )
self.assertEqual("body { color:#990; }%s" % os.linesep, compiler.input())
def test_precompiler_output_unicode(self):
- command = '%s %s' % (sys.executable, self.test_precompiler)
- compiler = CompilerFilter(content=self.content, filename=self.filename, command=command)
+ command = "%s %s" % (sys.executable, self.test_precompiler)
+ compiler = CompilerFilter(
+ content=self.content, filename=self.filename, command=command
+ )
self.assertEqual(type(compiler.input()), str)
def test_precompiler_cache(self):
# The cache may already have data in it depending on the order the tests are
# run, so start by clearing it:
cache.clear()
- command = '%s %s -f {infile} -o {outfile}' % (sys.executable, self.test_precompiler)
+ command = "%s %s -f {infile} -o {outfile}" % (
+ sys.executable,
+ self.test_precompiler,
+ )
compiler = CachedCompilerFilter(command=command, **self.cached_precompiler_args)
self.assertEqual("body { color:#990; }", compiler.input())
# We tell whether the precompiler actually ran by inspecting compiler.infile. If not None, the compiler had to
@@ -111,23 +143,33 @@ class PrecompilerTestCase(TestCase):
self.assertEqual("body { color:#990; }", compiler.input())
self.assertIsNone(compiler.infile) # Cached
- self.cached_precompiler_args['content'] += ' ' # Invalidate cache by slightly changing content
+ self.cached_precompiler_args[
+ "content"
+ ] += " " # Invalidate cache by slightly changing content
compiler = CachedCompilerFilter(command=command, **self.cached_precompiler_args)
self.assertEqual("body { color:#990; }", compiler.input())
self.assertIsNotNone(compiler.infile) # Not cached
- @mock.patch('django.core.cache.backends.locmem.LocMemCache.get')
+ @mock.patch("django.core.cache.backends.locmem.LocMemCache.get")
def test_precompiler_cache_issue750(self, mock_cache):
# emulate memcached and return string
- mock_cache.side_effect = (lambda key: str("body { color:#990; }"))
- command = '%s %s -f {infile} -o {outfile}' % (sys.executable, self.test_precompiler)
+ mock_cache.side_effect = lambda key: str("body { color:#990; }")
+ command = "%s %s -f {infile} -o {outfile}" % (
+ sys.executable,
+ self.test_precompiler,
+ )
compiler = CachedCompilerFilter(command=command, **self.cached_precompiler_args)
self.assertEqual("body { color:#990; }", compiler.input())
- self.assertEqual(type(compiler.input()), type(smart_str("body { color:#990; }")))
+ self.assertEqual(
+ type(compiler.input()), type(smart_str("body { color:#990; }"))
+ )
def test_precompiler_not_cacheable(self):
- command = '%s %s -f {infile} -o {outfile}' % (sys.executable, self.test_precompiler)
- self.cached_precompiler_args['mimetype'] = 'text/different'
+ command = "%s %s -f {infile} -o {outfile}" % (
+ sys.executable,
+ self.test_precompiler,
+ )
+ self.cached_precompiler_args["mimetype"] = "text/different"
compiler = CachedCompilerFilter(command=command, **self.cached_precompiler_args)
self.assertEqual("body { color:#990; }", compiler.input())
self.assertIsNotNone(compiler.infile) # Not cached
@@ -137,7 +179,10 @@ class PrecompilerTestCase(TestCase):
self.assertIsNotNone(compiler.infile) # Not cached
def test_precompiler_caches_empty_files(self):
- command = '%s %s -f {infile} -o {outfile}' % (sys.executable, self.test_precompiler)
+ command = "%s %s -f {infile} -o {outfile}" % (
+ sys.executable,
+ self.test_precompiler,
+ )
compiler = CachedCompilerFilter(command=command, **self.cached_precompiler_args)
self.assertEqual("body { color:#990; }", compiler.input())
@@ -220,14 +265,16 @@ class CalmjsTestCase(TestCase):
@override_settings(
- COMPRESS_ENABLED=True,
- COMPRESS_URL='/static/',
+ COMPRESS_ENABLED=True,
+ COMPRESS_URL="/static/",
)
class CssAbsolutizingTestCase(TestCase):
- hashing_method = 'mtime'
+ hashing_method = "mtime"
hashing_func = staticmethod(get_hashed_mtime)
- template = ("p { background: url('%(url)simg/python.png%(query)s%(hash)s%(frag)s') }"
- "p { filter: Alpha(src='%(url)simg/python.png%(query)s%(hash)s%(frag)s') }")
+ template = (
+ "p { background: url('%(url)simg/python.png%(query)s%(hash)s%(frag)s') }"
+ "p { filter: Alpha(src='%(url)simg/python.png%(query)s%(hash)s%(frag)s') }"
+ )
filter_class = CssAbsoluteFilter
@property
@@ -235,7 +282,9 @@ class CssAbsolutizingTestCase(TestCase):
return settings.COMPRESS_URL
def setUp(self):
- self.override_settings = self.settings(COMPRESS_CSS_HASHING_METHOD=self.hashing_method)
+ self.override_settings = self.settings(
+ COMPRESS_CSS_HASHING_METHOD=self.hashing_method
+ )
self.override_settings.__enter__()
def tearDown(self):
@@ -243,94 +292,132 @@ class CssAbsolutizingTestCase(TestCase):
@override_settings(COMPRESS_CSS_HASHING_METHOD=None)
def test_css_no_hash(self):
- filename = os.path.join(settings.COMPRESS_ROOT, 'css/url/test.css')
- content = self.template % blankdict(url='../../')
- params = blankdict({
- 'url': self.expected_url_prefix,
- })
+ filename = os.path.join(settings.COMPRESS_ROOT, "css/url/test.css")
+ content = self.template % blankdict(url="../../")
+ params = blankdict(
+ {
+ "url": self.expected_url_prefix,
+ }
+ )
output = self.template % params
filter = self.filter_class(content)
- self.assertEqual(output, filter.input(filename=filename, basename='css/url/test.css'))
+ self.assertEqual(
+ output, filter.input(filename=filename, basename="css/url/test.css")
+ )
def test_css_absolute_filter(self):
- filename = os.path.join(settings.COMPRESS_ROOT, 'css/url/test.css')
- imagefilename = os.path.join(settings.COMPRESS_ROOT, 'img/python.png')
- content = self.template % blankdict(url='../../')
- params = blankdict({
- 'url': self.expected_url_prefix,
- 'hash': '?' + self.hashing_func(imagefilename),
- })
+ filename = os.path.join(settings.COMPRESS_ROOT, "css/url/test.css")
+ imagefilename = os.path.join(settings.COMPRESS_ROOT, "img/python.png")
+ content = self.template % blankdict(url="../../")
+ params = blankdict(
+ {
+ "url": self.expected_url_prefix,
+ "hash": "?" + self.hashing_func(imagefilename),
+ }
+ )
output = self.template % params
filter = self.filter_class(content)
- self.assertEqual(output, filter.input(filename=filename, basename='css/url/test.css'))
+ self.assertEqual(
+ output, filter.input(filename=filename, basename="css/url/test.css")
+ )
def test_css_absolute_filter_url_fragment(self):
- filename = os.path.join(settings.COMPRESS_ROOT, 'css/url/test.css')
- imagefilename = os.path.join(settings.COMPRESS_ROOT, 'img/python.png')
- content = self.template % blankdict(url='../../', frag='#foo')
- params = blankdict({
- 'url': self.expected_url_prefix,
- 'hash': '?' + self.hashing_func(imagefilename),
- 'frag': '#foo',
- })
+ filename = os.path.join(settings.COMPRESS_ROOT, "css/url/test.css")
+ imagefilename = os.path.join(settings.COMPRESS_ROOT, "img/python.png")
+ content = self.template % blankdict(url="../../", frag="#foo")
+ params = blankdict(
+ {
+ "url": self.expected_url_prefix,
+ "hash": "?" + self.hashing_func(imagefilename),
+ "frag": "#foo",
+ }
+ )
output = self.template % params
filter = self.filter_class(content)
- self.assertEqual(output, filter.input(filename=filename, basename='css/url/test.css'))
+ self.assertEqual(
+ output, filter.input(filename=filename, basename="css/url/test.css")
+ )
def test_css_absolute_filter_only_url_fragment(self):
- filename = os.path.join(settings.COMPRESS_ROOT, 'css/url/test.css')
+ filename = os.path.join(settings.COMPRESS_ROOT, "css/url/test.css")
content = "p { background: url('#foo') }"
filter = self.filter_class(content)
- self.assertEqual(content, filter.input(filename=filename, basename='css/url/test.css'))
+ self.assertEqual(
+ content, filter.input(filename=filename, basename="css/url/test.css")
+ )
def test_css_absolute_filter_only_url_fragment_wrap_double_quotes(self):
- filename = os.path.join(settings.COMPRESS_ROOT, 'css/url/test.css')
+ filename = os.path.join(settings.COMPRESS_ROOT, "css/url/test.css")
content = 'p { background: url("#foo") }'
filter = self.filter_class(content)
- self.assertEqual(content, filter.input(filename=filename, basename='css/url/test.css'))
+ self.assertEqual(
+ content, filter.input(filename=filename, basename="css/url/test.css")
+ )
def test_css_absolute_filter_querystring(self):
- filename = os.path.join(settings.COMPRESS_ROOT, 'css/url/test.css')
- imagefilename = os.path.join(settings.COMPRESS_ROOT, 'img/python.png')
- content = self.template % blankdict(url='../../', query='?foo')
- params = blankdict({
- 'url': self.expected_url_prefix,
- 'query': '?foo',
- 'hash': '&' + self.hashing_func(imagefilename),
- })
+ filename = os.path.join(settings.COMPRESS_ROOT, "css/url/test.css")
+ imagefilename = os.path.join(settings.COMPRESS_ROOT, "img/python.png")
+ content = self.template % blankdict(url="../../", query="?foo")
+ params = blankdict(
+ {
+ "url": self.expected_url_prefix,
+ "query": "?foo",
+ "hash": "&" + self.hashing_func(imagefilename),
+ }
+ )
output = self.template % params
filter = self.filter_class(content)
- self.assertEqual(output, filter.input(filename=filename, basename='css/url/test.css'))
+ self.assertEqual(
+ output, filter.input(filename=filename, basename="css/url/test.css")
+ )
def test_css_absolute_filter_https(self):
- with self.settings(COMPRESS_URL='https://static.example.com/'):
+ with self.settings(COMPRESS_URL="https://static.example.com/"):
self.test_css_absolute_filter()
def test_css_absolute_filter_relative_path(self):
- filename = os.path.join(settings.TEST_DIR, 'whatever', '..', 'static', 'whatever/../css/url/test.css')
- imagefilename = os.path.join(settings.COMPRESS_ROOT, 'img/python.png')
- content = self.template % blankdict(url='../../')
- params = blankdict({
- 'url': self.expected_url_prefix,
- 'hash': '?' + self.hashing_func(imagefilename),
- })
+ filename = os.path.join(
+ settings.TEST_DIR,
+ "whatever",
+ "..",
+ "static",
+ "whatever/../css/url/test.css",
+ )
+ imagefilename = os.path.join(settings.COMPRESS_ROOT, "img/python.png")
+ content = self.template % blankdict(url="../../")
+ params = blankdict(
+ {
+ "url": self.expected_url_prefix,
+ "hash": "?" + self.hashing_func(imagefilename),
+ }
+ )
output = self.template % params
filter = self.filter_class(content)
- self.assertEqual(output, filter.input(filename=filename, basename='css/url/test.css'))
+ self.assertEqual(
+ output, filter.input(filename=filename, basename="css/url/test.css")
+ )
def test_css_absolute_filter_filename_outside_compress_root(self):
- filename = '/foo/bar/baz/test.css'
- content = self.template % blankdict(url='../qux/')
- params = blankdict({
- 'url': self.expected_url_prefix + 'bar/qux/',
- })
+ filename = "/foo/bar/baz/test.css"
+ content = self.template % blankdict(url="../qux/")
+ params = blankdict(
+ {
+ "url": self.expected_url_prefix + "bar/qux/",
+ }
+ )
output = self.template % params
filter = self.filter_class(content)
- self.assertEqual(output, filter.input(filename=filename, basename='bar/baz/test.css'))
+ self.assertEqual(
+ output, filter.input(filename=filename, basename="bar/baz/test.css")
+ )
def test_css_hunks(self):
- hash_python_png = self.hashing_func(os.path.join(settings.COMPRESS_ROOT, 'img/python.png'))
- hash_add_png = self.hashing_func(os.path.join(settings.COMPRESS_ROOT, 'img/add.png'))
+ hash_python_png = self.hashing_func(
+ os.path.join(settings.COMPRESS_ROOT, "img/python.png")
+ )
+ hash_add_png = self.hashing_func(
+ os.path.join(settings.COMPRESS_ROOT, "img/add.png")
+ )
css1 = """\
p { background: url('%(compress_url)simg/python.png?%(hash)s'); }
@@ -338,7 +425,9 @@ p { background: url(%(compress_url)simg/python.png?%(hash)s); }
p { background: url(%(compress_url)simg/python.png?%(hash)s); }
p { background: url('%(compress_url)simg/python.png?%(hash)s'); }
p { filter: progid:DXImageTransform.Microsoft.AlphaImageLoader(src='%(compress_url)simg/python.png?%(hash)s'); }
-""" % dict(compress_url=self.expected_url_prefix, hash=hash_python_png)
+""" % dict(
+ compress_url=self.expected_url_prefix, hash=hash_python_png
+ )
css2 = """\
p { background: url('%(compress_url)simg/add.png?%(hash)s'); }
@@ -346,118 +435,138 @@ p { background: url(%(compress_url)simg/add.png?%(hash)s); }
p { background: url(%(compress_url)simg/add.png?%(hash)s); }
p { background: url('%(compress_url)simg/add.png?%(hash)s'); }
p { filter: progid:DXImageTransform.Microsoft.AlphaImageLoader(src='%(compress_url)simg/add.png?%(hash)s'); }
-""" % dict(compress_url=self.expected_url_prefix, hash=hash_add_png)
+""" % dict(
+ compress_url=self.expected_url_prefix, hash=hash_add_png
+ )
css = """
<link rel="stylesheet" href="/static/css/url/url1.css" type="text/css">
<link rel="stylesheet" href="/static/css/url/2/url2.css" type="text/css">
"""
- css_node = CssCompressor('css', css)
+ css_node = CssCompressor("css", css)
self.assertEqual([css1, css2], list(css_node.hunks()))
def test_guess_filename(self):
- url = '%s/img/python.png' % settings.COMPRESS_URL.rstrip('/')
- path = os.path.join(settings.COMPRESS_ROOT, 'img/python.png')
+ url = "%s/img/python.png" % settings.COMPRESS_URL.rstrip("/")
+ path = os.path.join(settings.COMPRESS_ROOT, "img/python.png")
content = "p { background: url('%s') }" % url
filter = self.filter_class(content)
self.assertEqual(path, filter.guess_filename(url))
def test_filenames_with_space(self):
- filename = os.path.join(settings.COMPRESS_ROOT, 'css/url/test.css')
- imagefilename = os.path.join(settings.COMPRESS_ROOT, 'img/add with spaces.png')
+ filename = os.path.join(settings.COMPRESS_ROOT, "css/url/test.css")
+ imagefilename = os.path.join(settings.COMPRESS_ROOT, "img/add with spaces.png")
template = "p { background: url('%(url)simg/add with spaces.png%(query)s%(hash)s%(frag)s') }"
- content = template % blankdict(url='../../')
- params = blankdict({
- 'url': self.expected_url_prefix,
- 'hash': '?' + self.hashing_func(imagefilename),
- })
+ content = template % blankdict(url="../../")
+ params = blankdict(
+ {
+ "url": self.expected_url_prefix,
+ "hash": "?" + self.hashing_func(imagefilename),
+ }
+ )
output = template % params
filter = self.filter_class(content)
- self.assertEqual(output, filter.input(filename=filename, basename='css/url/test.css'))
+ self.assertEqual(
+ output, filter.input(filename=filename, basename="css/url/test.css")
+ )
def test_does_not_change_nested_urls(self):
css = """body { background-image: url("data:image/svg+xml;utf8,<svg><rect fill='url(%23gradient)'/></svg>");}"""
filter = self.filter_class(css, filename="doesntmatter")
- self.assertEqual(css, filter.input(filename="doesntmatter", basename="doesntmatter"))
+ self.assertEqual(
+ css, filter.input(filename="doesntmatter", basename="doesntmatter")
+ )
def test_does_not_change_quotes_in_src(self):
- filename = os.path.join(settings.COMPRESS_ROOT, 'css/url/test.css')
- hash_add_png = self.hashing_func(os.path.join(settings.COMPRESS_ROOT, 'img/add.png'))
+ filename = os.path.join(settings.COMPRESS_ROOT, "css/url/test.css")
+ hash_add_png = self.hashing_func(
+ os.path.join(settings.COMPRESS_ROOT, "img/add.png")
+ )
css = """p { filter: Alpha(src="/img/add.png%(hash)s") }"""
filter = self.filter_class(css % dict(hash=""))
- expected = css % dict(hash='?' + hash_add_png)
- self.assertEqual(expected, filter.input(filename=filename, basename='css/url/test.css'))
+ expected = css % dict(hash="?" + hash_add_png)
+ self.assertEqual(
+ expected, filter.input(filename=filename, basename="css/url/test.css")
+ )
-@override_settings(COMPRESS_URL='http://static.example.com/')
+@override_settings(COMPRESS_URL="http://static.example.com/")
class CssAbsolutizingTestCaseWithDifferentURL(CssAbsolutizingTestCase):
pass
class CssAbsolutizingTestCaseWithHash(CssAbsolutizingTestCase):
- hashing_method = 'content'
+ hashing_method = "content"
hashing_func = staticmethod(get_hashed_content)
@override_settings(
COMPRESS_ENABLED=True,
- COMPRESS_URL='/static/',
- COMPRESS_FILTERS={'css': ['compressor.filters.css_default.CssRelativeFilter']}
+ COMPRESS_URL="/static/",
+ COMPRESS_FILTERS={"css": ["compressor.filters.css_default.CssRelativeFilter"]},
)
class CssRelativizingTestCase(CssAbsolutizingTestCase):
filter_class = CssRelativeFilter
- expected_url_prefix = '../../'
+ expected_url_prefix = "../../"
@override_settings(
- COMPRESS_CSS_HASHING_METHOD=None,
- COMPRESS_OUTPUT_DIR='CACHE/in/depth'
+ COMPRESS_CSS_HASHING_METHOD=None, COMPRESS_OUTPUT_DIR="CACHE/in/depth"
)
def test_nested_cache_dir(self):
- filename = os.path.join(settings.COMPRESS_ROOT, 'css/url/test.css')
- content = self.template % blankdict(url='../../')
- params = blankdict({
- 'url': '../../../../',
- })
+ filename = os.path.join(settings.COMPRESS_ROOT, "css/url/test.css")
+ content = self.template % blankdict(url="../../")
+ params = blankdict(
+ {
+ "url": "../../../../",
+ }
+ )
output = self.template % params
filter = self.filter_class(content)
- self.assertEqual(output, filter.input(filename=filename,
- basename='css/url/test.css'))
+ self.assertEqual(
+ output, filter.input(filename=filename, basename="css/url/test.css")
+ )
@override_settings(
COMPRESS_ENABLED=True,
- COMPRESS_FILTERS={'css': [
- 'compressor.filters.css_default.CssAbsoluteFilter',
- 'compressor.filters.datauri.CssDataUriFilter',
- ]},
- COMPRESS_URL='/static/',
- COMPRESS_CSS_HASHING_METHOD='mtime'
+ COMPRESS_FILTERS={
+ "css": [
+ "compressor.filters.css_default.CssAbsoluteFilter",
+ "compressor.filters.datauri.CssDataUriFilter",
+ ]
+ },
+ COMPRESS_URL="/static/",
+ COMPRESS_CSS_HASHING_METHOD="mtime",
)
class CssDataUriTestCase(TestCase):
def setUp(self):
self.css = """
<link rel="stylesheet" href="/static/css/datauri.css" type="text/css">
"""
- self.css_node = CssCompressor('css', self.css)
+ self.css_node = CssCompressor("css", self.css)
def test_data_uris(self):
- datauri_hash = get_hashed_mtime(os.path.join(settings.COMPRESS_ROOT, 'img/python.png'))
- out = ['''.add { background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABGdBTUEAAK/INwWK6QAAABl0RVh0U29mdHdhcmUAQWRvYmUgSW1hZ2VSZWFkeXHJZTwAAAJvSURBVDjLpZPrS5NhGIf9W7YvBYOkhlkoqCklWChv2WyKik7blnNris72bi6dus0DLZ0TDxW1odtopDs4D8MDZuLU0kXq61CijSIIasOvv94VTUfLiB74fXngup7nvrnvJABJ/5PfLnTTdcwOj4RsdYmo5glBWP6iOtzwvIKSWstI0Wgx80SBblpKtE9KQs/We7EaWoT/8wbWP61gMmCH0lMDvokT4j25TiQU/ITFkek9Ow6+7WH2gwsmahCPdwyw75uw9HEO2gUZSkfyI9zBPCJOoJ2SMmg46N61YO/rNoa39Xi41oFuXysMfh36/Fp0b7bAfWAH6RGi0HglWNCbzYgJaFjRv6zGuy+b9It96N3SQvNKiV9HvSaDfFEIxXItnPs23BzJQd6DDEVM0OKsoVwBG/1VMzpXVWhbkUM2K4oJBDYuGmbKIJ0qxsAbHfRLzbjcnUbFBIpx/qH3vQv9b3U03IQ/HfFkERTzfFj8w8jSpR7GBE123uFEYAzaDRIqX/2JAtJbDat/COkd7CNBva2cMvq0MGxp0PRSCPF8BXjWG3FgNHc9XPT71Ojy3sMFdfJRCeKxEsVtKwFHwALZfCUk3tIfNR8XiJwc1LmL4dg141JPKtj3WUdNFJqLGFVPC4OkR4BxajTWsChY64wmCnMxsWPCHcutKBxMVp5mxA1S+aMComToaqTRUQknLTH62kHOVEE+VQnjahscNCy0cMBWsSI0TCQcZc5ALkEYckL5A5noWSBhfm2AecMAjbcRWV0pUTh0HE64TNf0mczcnnQyu/MilaFJCae1nw2fbz1DnVOxyGTlKeZft/Ff8x1BRssfACjTwQAAAABJRU5ErkJggg=="); }
+ datauri_hash = get_hashed_mtime(
+ os.path.join(settings.COMPRESS_ROOT, "img/python.png")
+ )
+ out = [
+ """.add { background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABGdBTUEAAK/INwWK6QAAABl0RVh0U29mdHdhcmUAQWRvYmUgSW1hZ2VSZWFkeXHJZTwAAAJvSURBVDjLpZPrS5NhGIf9W7YvBYOkhlkoqCklWChv2WyKik7blnNris72bi6dus0DLZ0TDxW1odtopDs4D8MDZuLU0kXq61CijSIIasOvv94VTUfLiB74fXngup7nvrnvJABJ/5PfLnTTdcwOj4RsdYmo5glBWP6iOtzwvIKSWstI0Wgx80SBblpKtE9KQs/We7EaWoT/8wbWP61gMmCH0lMDvokT4j25TiQU/ITFkek9Ow6+7WH2gwsmahCPdwyw75uw9HEO2gUZSkfyI9zBPCJOoJ2SMmg46N61YO/rNoa39Xi41oFuXysMfh36/Fp0b7bAfWAH6RGi0HglWNCbzYgJaFjRv6zGuy+b9It96N3SQvNKiV9HvSaDfFEIxXItnPs23BzJQd6DDEVM0OKsoVwBG/1VMzpXVWhbkUM2K4oJBDYuGmbKIJ0qxsAbHfRLzbjcnUbFBIpx/qH3vQv9b3U03IQ/HfFkERTzfFj8w8jSpR7GBE123uFEYAzaDRIqX/2JAtJbDat/COkd7CNBva2cMvq0MGxp0PRSCPF8BXjWG3FgNHc9XPT71Ojy3sMFdfJRCeKxEsVtKwFHwALZfCUk3tIfNR8XiJwc1LmL4dg141JPKtj3WUdNFJqLGFVPC4OkR4BxajTWsChY64wmCnMxsWPCHcutKBxMVp5mxA1S+aMComToaqTRUQknLTH62kHOVEE+VQnjahscNCy0cMBWsSI0TCQcZc5ALkEYckL5A5noWSBhfm2AecMAjbcRWV0pUTh0HE64TNf0mczcnnQyu/MilaFJCae1nw2fbz1DnVOxyGTlKeZft/Ff8x1BRssfACjTwQAAAABJRU5ErkJggg=="); }
.add-with-hash { background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABGdBTUEAAK/INwWK6QAAABl0RVh0U29mdHdhcmUAQWRvYmUgSW1hZ2VSZWFkeXHJZTwAAAJvSURBVDjLpZPrS5NhGIf9W7YvBYOkhlkoqCklWChv2WyKik7blnNris72bi6dus0DLZ0TDxW1odtopDs4D8MDZuLU0kXq61CijSIIasOvv94VTUfLiB74fXngup7nvrnvJABJ/5PfLnTTdcwOj4RsdYmo5glBWP6iOtzwvIKSWstI0Wgx80SBblpKtE9KQs/We7EaWoT/8wbWP61gMmCH0lMDvokT4j25TiQU/ITFkek9Ow6+7WH2gwsmahCPdwyw75uw9HEO2gUZSkfyI9zBPCJOoJ2SMmg46N61YO/rNoa39Xi41oFuXysMfh36/Fp0b7bAfWAH6RGi0HglWNCbzYgJaFjRv6zGuy+b9It96N3SQvNKiV9HvSaDfFEIxXItnPs23BzJQd6DDEVM0OKsoVwBG/1VMzpXVWhbkUM2K4oJBDYuGmbKIJ0qxsAbHfRLzbjcnUbFBIpx/qH3vQv9b3U03IQ/HfFkERTzfFj8w8jSpR7GBE123uFEYAzaDRIqX/2JAtJbDat/COkd7CNBva2cMvq0MGxp0PRSCPF8BXjWG3FgNHc9XPT71Ojy3sMFdfJRCeKxEsVtKwFHwALZfCUk3tIfNR8XiJwc1LmL4dg141JPKtj3WUdNFJqLGFVPC4OkR4BxajTWsChY64wmCnMxsWPCHcutKBxMVp5mxA1S+aMComToaqTRUQknLTH62kHOVEE+VQnjahscNCy0cMBWsSI0TCQcZc5ALkEYckL5A5noWSBhfm2AecMAjbcRWV0pUTh0HE64TNf0mczcnnQyu/MilaFJCae1nw2fbz1DnVOxyGTlKeZft/Ff8x1BRssfACjTwQAAAABJRU5ErkJggg=="); }
.python { background-image: url("/static/img/python.png?%s"); }
.datauri { background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAABGdBTUEAALGPC/xhBQAAAAlwSFlzAAALEwAACxMBAJqcGAAAAAd0SU1FB9YGARc5KB0XV+IAAAAddEVYdENvbW1lbnQAQ3JlYXRlZCB3aXRoIFRoZSBHSU1Q72QlbgAAAF1JREFUGNO9zL0NglAAxPEfdLTs4BZM4DIO4C7OwQg2JoQ9LE1exdlYvBBeZ7jqch9//q1uH4TLzw4d6+ErXMMcXuHWxId3KOETnnXXV6MJpcq2MLaI97CER3N0 vr4MkhoXe0rZigAAAABJRU5ErkJggg=="); }
-''' % datauri_hash]
+"""
+ % datauri_hash
+ ]
self.assertEqual(out, list(self.css_node.hunks()))
class TemplateTestCase(TestCase):
- @override_settings(COMPRESS_TEMPLATE_FILTER_CONTEXT={
- 'stuff': 'thing',
- 'gimmick': 'bold'
- })
+ @override_settings(
+ COMPRESS_TEMPLATE_FILTER_CONTEXT={"stuff": "thing", "gimmick": "bold"}
+ )
def test_template_filter(self):
content = """
#content {background-image: url("{{ STATIC_URL|default:stuff }}/images/bg.png");}
@@ -474,28 +583,48 @@ class SpecializedFiltersTest(TestCase):
"""
Test to check the Specializations of filters.
"""
+
def test_closure_filter(self):
- filter = ClosureCompilerFilter('')
- self.assertEqual(filter.options, (('binary', str('java -jar compiler.jar')), ('args', str(''))))
+ filter = ClosureCompilerFilter("")
+ self.assertEqual(
+ filter.options,
+ (("binary", str("java -jar compiler.jar")), ("args", str(""))),
+ )
def test_yuglify_filters(self):
- filter = YUglifyCSSFilter('')
- self.assertEqual(filter.command, '{binary} {args} --type=css')
- self.assertEqual(filter.options, (('binary', str('yuglify')), ('args', str('--terminal'))))
-
- filter = YUglifyJSFilter('')
- self.assertEqual(filter.command, '{binary} {args} --type=js')
- self.assertEqual(filter.options, (('binary', str('yuglify')), ('args', str('--terminal'))))
+ filter = YUglifyCSSFilter("")
+ self.assertEqual(filter.command, "{binary} {args} --type=css")
+ self.assertEqual(
+ filter.options, (("binary", str("yuglify")), ("args", str("--terminal")))
+ )
+
+ filter = YUglifyJSFilter("")
+ self.assertEqual(filter.command, "{binary} {args} --type=js")
+ self.assertEqual(
+ filter.options, (("binary", str("yuglify")), ("args", str("--terminal")))
+ )
def test_yui_filters(self):
- filter = YUICSSFilter('')
- self.assertEqual(filter.command, '{binary} {args} --type=css')
- self.assertEqual(filter.options, (('binary', str('java -jar yuicompressor.jar')), ('args', str(''))))
-
- filter = YUIJSFilter('', verbose=1)
- self.assertEqual(filter.command, '{binary} {args} --type=js --verbose')
- self.assertEqual(filter.options, (('binary', str('java -jar yuicompressor.jar')), ('args', str('')), ('verbose', 1)))
+ filter = YUICSSFilter("")
+ self.assertEqual(filter.command, "{binary} {args} --type=css")
+ self.assertEqual(
+ filter.options,
+ (("binary", str("java -jar yuicompressor.jar")), ("args", str(""))),
+ )
+
+ filter = YUIJSFilter("", verbose=1)
+ self.assertEqual(filter.command, "{binary} {args} --type=js --verbose")
+ self.assertEqual(
+ filter.options,
+ (
+ ("binary", str("java -jar yuicompressor.jar")),
+ ("args", str("")),
+ ("verbose", 1),
+ ),
+ )
def test_clean_css_filter(self):
- filter = CleanCSSFilter('')
- self.assertEqual(filter.options, (('binary', str('cleancss')), ('args', str(''))))
+ filter = CleanCSSFilter("")
+ self.assertEqual(
+ filter.options, (("binary", str("cleancss")), ("args", str("")))
+ )
diff --git a/compressor/tests/test_finder.py b/compressor/tests/test_finder.py
index 0420cd4..0ba4afc 100644
--- a/compressor/tests/test_finder.py
+++ b/compressor/tests/test_finder.py
@@ -5,7 +5,6 @@ from compressor.storage import CompressorFileStorage
class FinderTestCase(TestCase):
-
def test_has_correct_storage(self):
finder = CompressorFinder()
self.assertTrue(type(finder.storage) is CompressorFileStorage)
diff --git a/compressor/tests/test_jinja2ext.py b/compressor/tests/test_jinja2ext.py
index 98ce548..4f66b3b 100644
--- a/compressor/tests/test_jinja2ext.py
+++ b/compressor/tests/test_jinja2ext.py
@@ -14,132 +14,172 @@ class TestJinja2CompressorExtension(TestCase):
that we use jinja2 specific controls (*minus* character at block's
beginning or end). For more information see jinja2 documentation.
"""
+
def assertStrippedEqual(self, result, expected):
- self.assertEqual(result.strip(), expected.strip(), "%r != %r" % (
- result.strip(), expected.strip()))
+ self.assertEqual(
+ result.strip(),
+ expected.strip(),
+ "%r != %r" % (result.strip(), expected.strip()),
+ )
def setUp(self):
import jinja2
+
self.jinja2 = jinja2
from compressor.contrib.jinja2ext import CompressorExtension
+
self.env = self.jinja2.Environment(extensions=[CompressorExtension])
def test_error_raised_if_no_arguments_given(self):
- self.assertRaises(self.jinja2.exceptions.TemplateSyntaxError,
- self.env.from_string, '{% compress %}Foobar{% endcompress %}')
+ self.assertRaises(
+ self.jinja2.exceptions.TemplateSyntaxError,
+ self.env.from_string,
+ "{% compress %}Foobar{% endcompress %}",
+ )
def test_error_raised_if_wrong_kind_given(self):
- self.assertRaises(self.jinja2.exceptions.TemplateSyntaxError,
- self.env.from_string, '{% compress foo %}Foobar{% endcompress %}')
+ self.assertRaises(
+ self.jinja2.exceptions.TemplateSyntaxError,
+ self.env.from_string,
+ "{% compress foo %}Foobar{% endcompress %}",
+ )
def test_error_raised_if_wrong_closing_kind_given(self):
- self.assertRaises(self.jinja2.exceptions.TemplateSyntaxError,
- self.env.from_string, '{% compress js %}Foobar{% endcompress css %}')
+ self.assertRaises(
+ self.jinja2.exceptions.TemplateSyntaxError,
+ self.env.from_string,
+ "{% compress js %}Foobar{% endcompress css %}",
+ )
def test_error_raised_if_wrong_mode_given(self):
- self.assertRaises(self.jinja2.exceptions.TemplateSyntaxError,
- self.env.from_string, '{% compress css foo %}Foobar{% endcompress %}')
+ self.assertRaises(
+ self.jinja2.exceptions.TemplateSyntaxError,
+ self.env.from_string,
+ "{% compress css foo %}Foobar{% endcompress %}",
+ )
@override_settings(COMPRESS_ENABLED=False)
def test_compress_is_disabled(self):
- tag_body = '\n'.join([
- '<link rel="stylesheet" href="css/one.css" type="text/css" charset="utf-8">',
- '<style type="text/css">p { border:5px solid green;}</style>',
- '<link rel="stylesheet" href="css/two.css" type="text/css" charset="utf-8">',
- ])
- template_string = '{% compress css %}' + tag_body + '{% endcompress %}'
+ tag_body = "\n".join(
+ [
+ '<link rel="stylesheet" href="css/one.css" type="text/css" charset="utf-8">',
+ '<style type="text/css">p { border:5px solid green;}</style>',
+ '<link rel="stylesheet" href="css/two.css" type="text/css" charset="utf-8">',
+ ]
+ )
+ template_string = "{% compress css %}" + tag_body + "{% endcompress %}"
template = self.env.from_string(template_string)
self.assertEqual(tag_body, template.render())
# Test with explicit kind
- template_string = '{% compress css %}' + tag_body + '{% endcompress css %}'
+ template_string = "{% compress css %}" + tag_body + "{% endcompress css %}"
template = self.env.from_string(template_string)
self.assertEqual(tag_body, template.render())
def test_empty_tag(self):
- template = self.env.from_string("""{% compress js %}{% block js %}{% endblock %}{% endcompress %}""")
- context = {'STATIC_URL': settings.COMPRESS_URL}
- self.assertEqual('', template.render(context))
+ template = self.env.from_string(
+ """{% compress js %}{% block js %}{% endblock %}{% endcompress %}"""
+ )
+ context = {"STATIC_URL": settings.COMPRESS_URL}
+ self.assertEqual("", template.render(context))
def test_empty_tag_with_kind(self):
- template = self.env.from_string("""{% compress js %}{% block js %}
- {% endblock %}{% endcompress js %}""")
- context = {'STATIC_URL': settings.COMPRESS_URL}
- self.assertEqual('', template.render(context))
+ template = self.env.from_string(
+ """{% compress js %}{% block js %}
+ {% endblock %}{% endcompress js %}"""
+ )
+ context = {"STATIC_URL": settings.COMPRESS_URL}
+ self.assertEqual("", template.render(context))
def test_css_tag(self):
- template = self.env.from_string("""{% compress css -%}
+ template = self.env.from_string(
+ """{% compress css -%}
<link rel="stylesheet" href="{{ STATIC_URL }}css/one.css" type="text/css" charset="utf-8">
<style type="text/css">p { border:5px solid green;}</style>
<link rel="stylesheet" href="{{ STATIC_URL }}css/two.css" type="text/css" charset="utf-8">
- {% endcompress %}""")
- context = {'STATIC_URL': settings.COMPRESS_URL}
+ {% endcompress %}"""
+ )
+ context = {"STATIC_URL": settings.COMPRESS_URL}
out = css_tag("/static/CACHE/css/output.600674ea1d3d.css")
self.assertEqual(out, template.render(context))
def test_nonascii_css_tag(self):
- template = self.env.from_string("""{% compress css -%}
+ template = self.env.from_string(
+ """{% compress css -%}
<link rel="stylesheet" href="{{ STATIC_URL }}css/nonasc.css" type="text/css" charset="utf-8">
<style type="text/css">p { border:5px solid green;}</style>
- {% endcompress %}""")
- context = {'STATIC_URL': settings.COMPRESS_URL}
+ {% endcompress %}"""
+ )
+ context = {"STATIC_URL": settings.COMPRESS_URL}
out = css_tag("/static/CACHE/css/output.d5444a1ab4a3.css")
self.assertEqual(out, template.render(context))
def test_js_tag(self):
- template = self.env.from_string("""{% compress js -%}
+ template = self.env.from_string(
+ """{% compress js -%}
<script src="{{ STATIC_URL }}js/one.js" type="text/javascript" charset="utf-8"></script>
<script type="text/javascript" charset="utf-8">obj.value = "value";</script>
- {% endcompress %}""")
- context = {'STATIC_URL': settings.COMPRESS_URL}
+ {% endcompress %}"""
+ )
+ context = {"STATIC_URL": settings.COMPRESS_URL}
out = '<script src="/static/CACHE/js/output.8a0fed36c317.js"></script>'
self.assertEqual(out, template.render(context))
def test_nonascii_js_tag(self):
- template = self.env.from_string("""{% compress js -%}
+ template = self.env.from_string(
+ """{% compress js -%}
<script src="{{ STATIC_URL }}js/nonasc.js" type="text/javascript" charset="utf-8"></script>
<script type="text/javascript" charset="utf-8">var test_value = "\u2014";</script>
- {% endcompress %}""")
- context = {'STATIC_URL': settings.COMPRESS_URL}
+ {% endcompress %}"""
+ )
+ context = {"STATIC_URL": settings.COMPRESS_URL}
out = '<script src="/static/CACHE/js/output.8c00f1cf1e0a.js"></script>'
self.assertEqual(out, template.render(context))
def test_nonascii_latin1_js_tag(self):
- template = self.env.from_string("""{% compress js -%}
+ template = self.env.from_string(
+ """{% compress js -%}
<script src="{{ STATIC_URL }}js/nonasc-latin1.js" type="text/javascript" charset="latin-1"></script>
<script type="text/javascript">var test_value = "\u2014";</script>
- {% endcompress %}""")
- context = {'STATIC_URL': settings.COMPRESS_URL}
+ {% endcompress %}"""
+ )
+ context = {"STATIC_URL": settings.COMPRESS_URL}
out = '<script src="/static/CACHE/js/output.06a98ccfd380.js"></script>'
self.assertEqual(out, template.render(context))
def test_css_inline(self):
- template = self.env.from_string("""{% compress css, inline -%}
+ template = self.env.from_string(
+ """{% compress css, inline -%}
<link rel="stylesheet" href="{{ STATIC_URL }}css/one.css" type="text/css" charset="utf-8">
<style type="text/css">p { border:5px solid green;}</style>
- {% endcompress %}""")
- context = {'STATIC_URL': settings.COMPRESS_URL}
+ {% endcompress %}"""
+ )
+ context = {"STATIC_URL": settings.COMPRESS_URL}
out = (
'<style type="text/css">body{background:#990}'
- 'p{border:5px solid green}</style>'
+ "p{border:5px solid green}</style>"
)
self.assertEqual(out, template.render(context))
def test_js_inline(self):
- template = self.env.from_string("""{% compress js, inline -%}
+ template = self.env.from_string(
+ """{% compress js, inline -%}
<script src="{{ STATIC_URL }}js/one.js" type="text/css" type="text/javascript" charset="utf-8"></script>
<script type="text/javascript" charset="utf-8">obj.value = "value";</script>
- {% endcompress %}""")
- context = {'STATIC_URL': settings.COMPRESS_URL}
+ {% endcompress %}"""
+ )
+ context = {"STATIC_URL": settings.COMPRESS_URL}
out = '<script>obj={};;obj.value="value";;</script>'
self.assertEqual(out, template.render(context))
def test_nonascii_inline_css(self):
with self.settings(COMPRESS_ENABLED=False):
- template = self.env.from_string('{% compress css %}'
- '<style type="text/css">'
- '/* русский текст */'
- '</style>{% endcompress %}')
+ template = self.env.from_string(
+ "{% compress css %}"
+ '<style type="text/css">'
+ "/* русский текст */"
+ "</style>{% endcompress %}"
+ )
out = '<link rel="stylesheet" href="/static/CACHE/css/output.e3b0c44298fc.css" type="text/css">'
- context = {'STATIC_URL': settings.COMPRESS_URL}
+ context = {"STATIC_URL": settings.COMPRESS_URL}
self.assertEqual(out, template.render(context))
diff --git a/compressor/tests/test_mtime_cache.py b/compressor/tests/test_mtime_cache.py
index 7ad2ffa..f0fe1fa 100644
--- a/compressor/tests/test_mtime_cache.py
+++ b/compressor/tests/test_mtime_cache.py
@@ -9,30 +9,33 @@ class TestMtimeCacheCommand(TestCase):
# FIXME: add actual tests, improve the existing ones.
exclusion_patterns = [
- '*CACHE*', '*custom*', '*066cd253eada.js', '*d728fc7f9301.js', '*8a0fed36c317.js', 'test.txt*'
+ "*CACHE*",
+ "*custom*",
+ "*066cd253eada.js",
+ "*d728fc7f9301.js",
+ "*8a0fed36c317.js",
+ "test.txt*",
]
def default_ignore(self):
- return ['--ignore=%s' % pattern for pattern in self.exclusion_patterns]
+ return ["--ignore=%s" % pattern for pattern in self.exclusion_patterns]
def test_handle_no_args(self):
with self.assertRaises(CommandError):
- call_command('mtime_cache')
+ call_command("mtime_cache")
def test_handle_add(self):
out = io.StringIO()
with self.settings(CACHES={}):
- call_command(
- 'mtime_cache', '--add', *self.default_ignore(), stdout=out)
+ call_command("mtime_cache", "--add", *self.default_ignore(), stdout=out)
output = out.getvalue()
- self.assertIn('Deleted mtimes of 20 files from the cache.', output)
- self.assertIn('Added mtimes of 20 files to cache.', output)
+ self.assertIn("Deleted mtimes of 20 files from the cache.", output)
+ self.assertIn("Added mtimes of 20 files to cache.", output)
def test_handle_clean(self):
out = io.StringIO()
with self.settings(CACHES={}):
- call_command(
- 'mtime_cache', '--clean', *self.default_ignore(), stdout=out)
+ call_command("mtime_cache", "--clean", *self.default_ignore(), stdout=out)
output = out.getvalue()
- self.assertIn('Deleted mtimes of 20 files from the cache.', output)
- self.assertNotIn('Added mtimes of 20 files to cache.', output)
+ self.assertIn("Deleted mtimes of 20 files from the cache.", output)
+ self.assertNotIn("Added mtimes of 20 files to cache.", output)
diff --git a/compressor/tests/test_offline.py b/compressor/tests/test_offline.py
index ce47493..1af5cb2 100644
--- a/compressor/tests/test_offline.py
+++ b/compressor/tests/test_offline.py
@@ -21,11 +21,11 @@ from compressor.utils import get_mod_func
def offline_context_generator():
for i in range(1, 4):
- yield {'content': 'OK %d!' % i}
+ yield {"content": "OK %d!" % i}
def static_url_context_generator():
- yield {'STATIC_URL': settings.STATIC_URL}
+ yield {"STATIC_URL": settings.STATIC_URL}
class LazyScriptNamePrefixedUrl(str):
@@ -48,8 +48,9 @@ class LazyScriptNamePrefixedUrl(str):
The implementation is incomplete, all ``str`` methods must be overridden
in order to work correctly with the rest of Django core.
"""
+
def __str__(self):
- return get_script_prefix() + self[1:] if self.startswith('/') else self
+ return get_script_prefix() + self[1:] if self.startswith("/") else self
def __unicode__(self):
return str(self)
@@ -61,7 +62,7 @@ class LazyScriptNamePrefixedUrl(str):
return str(self).split(*args, **kwargs)
def replace(self, *args, **kwargs):
- """ Override ``.replace()`` to make it work with ``{% static %}``.
+ """Override ``.replace()`` to make it work with ``{% static %}``.
In ``django.core.files.storage``, ``FileSystemStorage.url()`` passes
this object to ``urllib.parse.urljoin``.
@@ -87,14 +88,14 @@ def script_prefix(new_prefix):
class OfflineTestCaseMixin:
- CHARSET = 'utf-8'
- template_name = 'test_compressor_offline.html'
+ CHARSET = "utf-8"
+ template_name = "test_compressor_offline.html"
# Change this for each test class
- templates_dir = ''
- expected_basename = 'output'
- expected_hash = ''
+ templates_dir = ""
+ expected_basename = "output"
+ expected_hash = ""
# Engines to test
- engines = ('django', 'jinja2')
+ engines = ("django", "jinja2")
additional_test_settings = None
def setUp(self):
@@ -108,23 +109,22 @@ class OfflineTestCaseMixin:
# TEMPLATES[1] to be Jinja2 templates backend in test_settings.
TEMPLATES = copy.deepcopy(settings.TEMPLATES)
- django_template_dir = os.path.join(
- TEMPLATES[0]['DIRS'][0], self.templates_dir)
- jinja2_template_dir = os.path.join(
- TEMPLATES[1]['DIRS'][0], self.templates_dir)
+ django_template_dir = os.path.join(TEMPLATES[0]["DIRS"][0], self.templates_dir)
+ jinja2_template_dir = os.path.join(TEMPLATES[1]["DIRS"][0], self.templates_dir)
- TEMPLATES[0]['DIRS'] = [django_template_dir]
- TEMPLATES[1]['DIRS'] = [jinja2_template_dir]
+ TEMPLATES[0]["DIRS"] = [django_template_dir]
+ TEMPLATES[1]["DIRS"] = [jinja2_template_dir]
override_settings = {
- 'TEMPLATES': TEMPLATES,
- 'COMPRESS_ENABLED': True,
- 'COMPRESS_OFFLINE': True
+ "TEMPLATES": TEMPLATES,
+ "COMPRESS_ENABLED": True,
+ "COMPRESS_OFFLINE": True,
}
- if 'jinja2' in self.engines:
- override_settings['COMPRESS_JINJA2_GET_ENVIRONMENT'] = (
- lambda: self._get_jinja2_env())
+ if "jinja2" in self.engines:
+ override_settings[
+ "COMPRESS_JINJA2_GET_ENVIRONMENT"
+ ] = lambda: self._get_jinja2_env()
if self.additional_test_settings is not None:
override_settings.update(self.additional_test_settings)
@@ -132,29 +132,29 @@ class OfflineTestCaseMixin:
self.override_settings = self.settings(**override_settings)
self.override_settings.__enter__()
- if 'django' in self.engines:
- self.template_path = os.path.join(
- django_template_dir, self.template_name)
+ if "django" in self.engines:
+ self.template_path = os.path.join(django_template_dir, self.template_name)
- origin = Origin(name=self.template_path, # Absolute path
- template_name=self.template_name) # Loader-relative path
- with io.open(self.template_path,
- encoding=self.CHARSET) as file_:
+ origin = Origin(
+ name=self.template_path, # Absolute path
+ template_name=self.template_name,
+ ) # Loader-relative path
+ with io.open(self.template_path, encoding=self.CHARSET) as file_:
self.template = Template(file_.read(), origin=origin)
- if 'jinja2' in self.engines:
+ if "jinja2" in self.engines:
self.template_path_jinja2 = os.path.join(
- jinja2_template_dir, self.template_name)
- jinja2_env = override_settings['COMPRESS_JINJA2_GET_ENVIRONMENT']()
+ jinja2_template_dir, self.template_name
+ )
+ jinja2_env = override_settings["COMPRESS_JINJA2_GET_ENVIRONMENT"]()
- with io.open(self.template_path_jinja2,
- encoding=self.CHARSET) as file_:
+ with io.open(self.template_path_jinja2, encoding=self.CHARSET) as file_:
self.template_jinja2 = jinja2_env.from_string(file_.read())
def tearDown(self):
self.override_settings.__exit__(None, None, None)
- manifest_filename = 'manifest.json'
+ manifest_filename = "manifest.json"
if default_offline_manifest_storage.exists(manifest_filename):
default_offline_manifest_storage.delete(manifest_filename)
@@ -162,27 +162,23 @@ class OfflineTestCaseMixin:
contexts = settings.COMPRESS_OFFLINE_CONTEXT
if not isinstance(contexts, (list, tuple)):
contexts = [contexts]
- if engine == 'django':
+ if engine == "django":
return [Context(c) for c in contexts]
- if engine == 'jinja2':
+ if engine == "jinja2":
return contexts
return None
def _render_template(self, engine):
contexts = self._prepare_contexts(engine)
- if engine == 'django':
- return ''.join(self.template.render(c) for c in contexts)
- if engine == 'jinja2':
- return '\n'.join(
- self.template_jinja2.render(c) for c in contexts) + '\n'
+ if engine == "django":
+ return "".join(self.template.render(c) for c in contexts)
+ if engine == "jinja2":
+ return "\n".join(self.template_jinja2.render(c) for c in contexts) + "\n"
return None
def _render_script(self, hash):
- return (
- '<script src="{}CACHE/js/{}.{}.js">'
- '</script>'.format(
- settings.COMPRESS_URL_PLACEHOLDER, self.expected_basename, hash
- )
+ return '<script src="{}CACHE/js/{}.{}.js">' "</script>".format(
+ settings.COMPRESS_URL_PLACEHOLDER, self.expected_basename, hash
)
def _render_link(self, hash):
@@ -193,8 +189,8 @@ class OfflineTestCaseMixin:
)
)
- def _render_result(self, result, separator='\n'):
- return (separator.join(result) + '\n').replace(
+ def _render_result(self, result, separator="\n"):
+ return (separator.join(result) + "\n").replace(
settings.COMPRESS_URL_PLACEHOLDER, str(settings.COMPRESS_URL)
)
@@ -203,41 +199,42 @@ class OfflineTestCaseMixin:
if not isinstance(hashes, (list, tuple)):
hashes = [hashes]
count, result = CompressCommand().handle_inner(
- engines=[engine], verbosity=verbosity)
+ engines=[engine], verbosity=verbosity
+ )
self.assertEqual(len(hashes), count)
self.assertEqual([self._render_script(h) for h in hashes], result)
rendered_template = self._render_template(engine)
self.assertEqual(rendered_template, self._render_result(result))
def test_offline_django(self):
- if 'django' not in self.engines:
- raise SkipTest('This test class does not support django engine.')
- self._test_offline(engine='django')
+ if "django" not in self.engines:
+ raise SkipTest("This test class does not support django engine.")
+ self._test_offline(engine="django")
def test_offline_jinja2(self):
- if 'jinja2' not in self.engines:
- raise SkipTest('This test class does not support jinja2 engine.')
- self._test_offline(engine='jinja2')
+ if "jinja2" not in self.engines:
+ raise SkipTest("This test class does not support jinja2 engine.")
+ self._test_offline(engine="jinja2")
def test_offline_django_verbosity_1(self):
- if 'django' not in self.engines:
- raise SkipTest('This test class does not support django engine.')
- self._test_offline(engine='django', verbosity=1)
+ if "django" not in self.engines:
+ raise SkipTest("This test class does not support django engine.")
+ self._test_offline(engine="django", verbosity=1)
def test_offline_jinja2_verbosity_1(self):
- if 'jinja2' not in self.engines:
- raise SkipTest('This test class does not support jinja2 engine.')
- self._test_offline(engine='jinja2', verbosity=1)
+ if "jinja2" not in self.engines:
+ raise SkipTest("This test class does not support jinja2 engine.")
+ self._test_offline(engine="jinja2", verbosity=1)
def test_offline_django_verbosity_2(self):
- if 'django' not in self.engines:
- raise SkipTest('This test class does not support django engine.')
- self._test_offline(engine='django', verbosity=2)
+ if "django" not in self.engines:
+ raise SkipTest("This test class does not support django engine.")
+ self._test_offline(engine="django", verbosity=2)
def test_offline_jinja2_verbosity_2(self):
- if 'jinja2' not in self.engines:
- raise SkipTest('This test class does not support jinja2 engine.')
- self._test_offline(engine='jinja2', verbosity=2)
+ if "jinja2" not in self.engines:
+ raise SkipTest("This test class does not support jinja2 engine.")
+ self._test_offline(engine="jinja2", verbosity=2)
def _get_jinja2_env(self):
import jinja2.ext
@@ -252,7 +249,7 @@ class OfflineTestCaseMixin:
]
loader = self._get_jinja2_loader()
env = jinja2.Environment(extensions=extensions, loader=loader)
- env.globals['url_for'] = url_for
+ env.globals["url_for"] = url_for
return env
@@ -260,35 +257,36 @@ class OfflineTestCaseMixin:
import jinja2
loader = jinja2.FileSystemLoader(
- settings.TEMPLATES[1]['DIRS'], encoding=self.CHARSET)
+ settings.TEMPLATES[1]["DIRS"], encoding=self.CHARSET
+ )
return loader
class OfflineCompressBasicTestCase(OfflineTestCaseMixin, TestCase):
- templates_dir = 'basic'
- expected_hash = '822ac7501287'
+ templates_dir = "basic"
+ expected_hash = "822ac7501287"
- @patch.object(CompressCommand, 'compress')
+ @patch.object(CompressCommand, "compress")
def test_handle_no_args(self, compress_mock):
compress_mock.return_value = {}, 1, []
CompressCommand().handle()
self.assertEqual(compress_mock.call_count, 1)
- @patch.object(CompressCommand, 'compress')
+ @patch.object(CompressCommand, "compress")
def test_handle_compress_disabled(self, compress_mock):
with self.settings(COMPRESS_ENABLED=False):
with self.assertRaises(CommandError):
CompressCommand().handle()
self.assertEqual(compress_mock.call_count, 0)
- @patch.object(CompressCommand, 'compress')
+ @patch.object(CompressCommand, "compress")
def test_handle_compress_offline_disabled(self, compress_mock):
with self.settings(COMPRESS_OFFLINE=False):
with self.assertRaises(CommandError):
CompressCommand().handle()
self.assertEqual(compress_mock.call_count, 0)
- @patch.object(CompressCommand, 'compress')
+ @patch.object(CompressCommand, "compress")
def test_handle_compress_offline_disabled_force(self, compress_mock):
compress_mock.return_value = {}, 1, []
with self.settings(COMPRESS_OFFLINE=False):
@@ -298,19 +296,17 @@ class OfflineCompressBasicTestCase(OfflineTestCaseMixin, TestCase):
def test_rendering_without_manifest_raises_exception(self):
# flush cached manifest
flush_offline_manifest()
- self.assertRaises(OfflineGenerationError,
- self.template.render, Context({}))
+ self.assertRaises(OfflineGenerationError, self.template.render, Context({}))
def test_rendering_without_manifest_raises_exception_jinja2(self):
# flush cached manifest
flush_offline_manifest()
- self.assertRaises(OfflineGenerationError,
- self.template_jinja2.render, {})
+ self.assertRaises(OfflineGenerationError, self.template_jinja2.render, {})
def _test_deleting_manifest_does_not_affect_rendering(self, engine):
count, result = CompressCommand().handle_inner(engines=[engine], verbosity=0)
get_offline_manifest()
- manifest_filename = 'manifest.json'
+ manifest_filename = "manifest.json"
if default_offline_manifest_storage.exists(manifest_filename):
default_offline_manifest_storage.delete(manifest_filename)
self.assertEqual(1, count)
@@ -324,301 +320,327 @@ class OfflineCompressBasicTestCase(OfflineTestCaseMixin, TestCase):
def test_get_loaders(self):
TEMPLATE_LOADERS = (
- ('django.template.loaders.cached.Loader', (
- 'django.template.loaders.filesystem.Loader',
- 'django.template.loaders.app_directories.Loader',
- )),
+ (
+ "django.template.loaders.cached.Loader",
+ (
+ "django.template.loaders.filesystem.Loader",
+ "django.template.loaders.app_directories.Loader",
+ ),
+ ),
)
with self.settings(TEMPLATE_LOADERS=TEMPLATE_LOADERS):
- from django.template.loaders.filesystem import (
- Loader as FileSystemLoader)
+ from django.template.loaders.filesystem import Loader as FileSystemLoader
from django.template.loaders.app_directories import (
- Loader as AppDirectoriesLoader)
+ Loader as AppDirectoriesLoader,
+ )
+
loaders = CompressCommand().get_loaders()
self.assertTrue(isinstance(loaders[0], FileSystemLoader))
self.assertTrue(isinstance(loaders[1], AppDirectoriesLoader))
- @patch("compressor.offline.django.DjangoParser.render_node",
- side_effect=Exception(b"non-ascii character here:\xc3\xa4"))
+ @patch(
+ "compressor.offline.django.DjangoParser.render_node",
+ side_effect=Exception(b"non-ascii character here:\xc3\xa4"),
+ )
def test_non_ascii_exception_messages(self, mock):
with self.assertRaises(CommandError):
CompressCommand().handle(verbosity=0)
class OfflineCompressSkipDuplicatesTestCase(OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_duplicate'
+ templates_dir = "test_duplicate"
def _test_offline(self, engine, verbosity=0):
count, result = CompressCommand().handle_inner(
- engines=[engine], verbosity=verbosity)
+ engines=[engine], verbosity=verbosity
+ )
# Only one block compressed, the second identical one was skipped.
self.assertEqual(1, count)
# Only 1 <script> block in returned result as well.
- self.assertEqual([self._render_script('822ac7501287')], result)
+ self.assertEqual([self._render_script("822ac7501287")], result)
rendered_template = self._render_template(engine)
# But rendering the template returns both (identical) scripts.
- self.assertEqual(
- rendered_template, self._render_result(result * 2, ''))
+ self.assertEqual(rendered_template, self._render_result(result * 2, ""))
class SuperMixin:
# Block.super not supported for Jinja2 yet.
- engines = ('django',)
+ engines = ("django",)
-class OfflineCompressBlockSuperTestCase(
- SuperMixin, OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_block_super'
- expected_hash = '817b5defb197'
+class OfflineCompressBlockSuperTestCase(SuperMixin, OfflineTestCaseMixin, TestCase):
+ templates_dir = "test_block_super"
+ expected_hash = "817b5defb197"
class OfflineCompressBlockSuperMultipleTestCase(
- SuperMixin, OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_block_super_multiple'
- expected_hash = 'd3f749e83c81'
+ SuperMixin, OfflineTestCaseMixin, TestCase
+):
+ templates_dir = "test_block_super_multiple"
+ expected_hash = "d3f749e83c81"
class OfflineCompressBlockSuperMultipleCachedLoaderTestCase(
- SuperMixin, OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_block_super_multiple_cached'
- expected_hash = '055f88f4751f'
+ SuperMixin, OfflineTestCaseMixin, TestCase
+):
+ templates_dir = "test_block_super_multiple_cached"
+ expected_hash = "055f88f4751f"
additional_test_settings = {
- 'TEMPLATE_LOADERS': (
- ('django.template.loaders.cached.Loader', (
- 'django.template.loaders.filesystem.Loader',
- 'django.template.loaders.app_directories.Loader',
- )),
+ "TEMPLATE_LOADERS": (
+ (
+ "django.template.loaders.cached.Loader",
+ (
+ "django.template.loaders.filesystem.Loader",
+ "django.template.loaders.app_directories.Loader",
+ ),
+ ),
)
}
class OfflineCompressBlockSuperTestCaseWithExtraContent(
- SuperMixin, OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_block_super_extra'
+ SuperMixin, OfflineTestCaseMixin, TestCase
+):
+ templates_dir = "test_block_super_extra"
def _test_offline(self, engine, verbosity=0):
count, result = CompressCommand().handle_inner(
- engines=[engine], verbosity=verbosity)
+ engines=[engine], verbosity=verbosity
+ )
self.assertEqual(2, count)
- self.assertEqual([
- self._render_script('bfcec76e0f28'),
- self._render_script('817b5defb197')
- ], result)
+ self.assertEqual(
+ [self._render_script("bfcec76e0f28"), self._render_script("817b5defb197")],
+ result,
+ )
rendered_template = self._render_template(engine)
- self.assertEqual(rendered_template, self._render_result(result, ''))
+ self.assertEqual(rendered_template, self._render_result(result, ""))
class OfflineCompressConditionTestCase(OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_condition'
- expected_hash = 'a3275743dc69'
+ templates_dir = "test_condition"
+ expected_hash = "a3275743dc69"
additional_test_settings = {
- 'COMPRESS_OFFLINE_CONTEXT': {
- 'condition': 'red',
+ "COMPRESS_OFFLINE_CONTEXT": {
+ "condition": "red",
}
}
class OfflineCompressTemplateTagTestCase(OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_templatetag'
- expected_hash = '2bb88185b4f5'
+ templates_dir = "test_templatetag"
+ expected_hash = "2bb88185b4f5"
class OfflineCompressStaticTemplateTagTestCase(OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_static_templatetag'
- expected_hash = 'be0b1eade28b'
+ templates_dir = "test_static_templatetag"
+ expected_hash = "be0b1eade28b"
class OfflineCompressTemplateTagNamedTestCase(OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_templatetag_named'
- expected_basename = 'output_name'
- expected_hash = '822ac7501287'
+ templates_dir = "test_templatetag_named"
+ expected_basename = "output_name"
+ expected_hash = "822ac7501287"
class OfflineCompressTestCaseWithContext(OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_with_context'
- expected_hash = 'c6bf81bca7ad'
+ templates_dir = "test_with_context"
+ expected_hash = "c6bf81bca7ad"
additional_test_settings = {
- 'COMPRESS_OFFLINE_CONTEXT': {
- 'content': 'OK!',
+ "COMPRESS_OFFLINE_CONTEXT": {
+ "content": "OK!",
}
}
class OfflineCompressTestCaseWithContextSuper(
- SuperMixin, OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_with_context_super'
- expected_hash = 'dd79e1bd1527'
+ SuperMixin, OfflineTestCaseMixin, TestCase
+):
+ templates_dir = "test_with_context_super"
+ expected_hash = "dd79e1bd1527"
additional_test_settings = {
- 'COMPRESS_OFFLINE_CONTEXT': {
- 'content': 'OK!',
+ "COMPRESS_OFFLINE_CONTEXT": {
+ "content": "OK!",
}
}
class OfflineCompressTestCaseWithContextList(OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_with_context'
- expected_hash = ['8b4a7452e1c5', '55b3123e884c', 'bfc63829cc58']
+ templates_dir = "test_with_context"
+ expected_hash = ["8b4a7452e1c5", "55b3123e884c", "bfc63829cc58"]
additional_test_settings = {
- 'COMPRESS_OFFLINE_CONTEXT': list(offline_context_generator())
+ "COMPRESS_OFFLINE_CONTEXT": list(offline_context_generator())
}
def _prepare_contexts(self, engine):
- if engine == 'django':
+ if engine == "django":
return [Context(c) for c in settings.COMPRESS_OFFLINE_CONTEXT]
- if engine == 'jinja2':
+ if engine == "jinja2":
return settings.COMPRESS_OFFLINE_CONTEXT
return None
class OfflineCompressTestCaseWithContextListSuper(
- SuperMixin, OfflineCompressTestCaseWithContextList):
- templates_dir = 'test_with_context_super'
- expected_hash = ['b39975a8f6ea', 'ed565a1d262f', '6ac9e4b29feb']
+ SuperMixin, OfflineCompressTestCaseWithContextList
+):
+ templates_dir = "test_with_context_super"
+ expected_hash = ["b39975a8f6ea", "ed565a1d262f", "6ac9e4b29feb"]
additional_test_settings = {
- 'COMPRESS_OFFLINE_CONTEXT': list(offline_context_generator())
+ "COMPRESS_OFFLINE_CONTEXT": list(offline_context_generator())
}
-class OfflineCompressTestCaseWithContextGenerator(
- OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_with_context'
- expected_hash = ['8b4a7452e1c5', '55b3123e884c', 'bfc63829cc58']
+class OfflineCompressTestCaseWithContextGenerator(OfflineTestCaseMixin, TestCase):
+ templates_dir = "test_with_context"
+ expected_hash = ["8b4a7452e1c5", "55b3123e884c", "bfc63829cc58"]
additional_test_settings = {
- 'COMPRESS_OFFLINE_CONTEXT': 'compressor.tests.test_offline.'
- 'offline_context_generator'
+ "COMPRESS_OFFLINE_CONTEXT": "compressor.tests.test_offline."
+ "offline_context_generator"
}
def _prepare_contexts(self, engine):
module, function = get_mod_func(settings.COMPRESS_OFFLINE_CONTEXT)
contexts = getattr(import_module(module), function)()
- if engine == 'django':
+ if engine == "django":
return (Context(c) for c in contexts)
- if engine == 'jinja2':
+ if engine == "jinja2":
return contexts
return None
class OfflineCompressTestCaseWithContextGeneratorSuper(
- SuperMixin, OfflineCompressTestCaseWithContextGenerator):
- templates_dir = 'test_with_context_super'
- expected_hash = ['b39975a8f6ea', 'ed565a1d262f', '6ac9e4b29feb']
+ SuperMixin, OfflineCompressTestCaseWithContextGenerator
+):
+ templates_dir = "test_with_context_super"
+ expected_hash = ["b39975a8f6ea", "ed565a1d262f", "6ac9e4b29feb"]
additional_test_settings = {
- 'COMPRESS_OFFLINE_CONTEXT': 'compressor.tests.test_offline.'
- 'offline_context_generator'
+ "COMPRESS_OFFLINE_CONTEXT": "compressor.tests.test_offline."
+ "offline_context_generator"
}
class OfflineCompressStaticUrlIndependenceTestCase(
- OfflineCompressTestCaseWithContextGenerator):
+ OfflineCompressTestCaseWithContextGenerator
+):
"""
Test that the offline manifest is independent of STATIC_URL.
I.e. users can use the manifest with any other STATIC_URL in the future.
"""
- templates_dir = 'test_static_url_independence'
- expected_hash = 'b0bfc3754fd4'
+
+ templates_dir = "test_static_url_independence"
+ expected_hash = "b0bfc3754fd4"
additional_test_settings = {
- 'STATIC_URL': '/custom/static/url/',
+ "STATIC_URL": "/custom/static/url/",
# We use ``COMPRESS_OFFLINE_CONTEXT`` generator to make sure that
# ``STATIC_URL`` is not cached when rendering the template.
- 'COMPRESS_OFFLINE_CONTEXT': (
- 'compressor.tests.test_offline.static_url_context_generator'
- )
+ "COMPRESS_OFFLINE_CONTEXT": (
+ "compressor.tests.test_offline.static_url_context_generator"
+ ),
}
def _test_offline(self, engine, verbosity=0):
count, result = CompressCommand().handle_inner(
- engines=[engine], verbosity=verbosity)
+ engines=[engine], verbosity=verbosity
+ )
self.assertEqual(1, count)
self.assertEqual([self._render_script(self.expected_hash)], result)
- self.assertEqual(
- self._render_template(engine), self._render_result(result))
+ self.assertEqual(self._render_template(engine), self._render_result(result))
# Changing STATIC_URL setting doesn't break things despite that
# offline compression was made with different STATIC_URL.
- with self.settings(STATIC_URL='/another/static/url/'):
- self.assertEqual(
- self._render_template(engine), self._render_result(result))
+ with self.settings(STATIC_URL="/another/static/url/"):
+ self.assertEqual(self._render_template(engine), self._render_result(result))
class OfflineCompressTestCaseWithContextVariableInheritance(
- OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_with_context_variable_inheritance'
- expected_hash = 'b8376aad1357'
+ OfflineTestCaseMixin, TestCase
+):
+ templates_dir = "test_with_context_variable_inheritance"
+ expected_hash = "b8376aad1357"
additional_test_settings = {
- 'COMPRESS_OFFLINE_CONTEXT': {
- 'parent_template': 'base.html',
+ "COMPRESS_OFFLINE_CONTEXT": {
+ "parent_template": "base.html",
}
}
- def _render_result(self, result, separator='\n'):
- return '\n' + super()._render_result(result, separator)
+ def _render_result(self, result, separator="\n"):
+ return "\n" + super()._render_result(result, separator)
class OfflineCompressTestCaseWithContextVariableInheritanceSuper(
- SuperMixin, OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_with_context_variable_inheritance_super'
+ SuperMixin, OfflineTestCaseMixin, TestCase
+):
+ templates_dir = "test_with_context_variable_inheritance_super"
additional_test_settings = {
- 'COMPRESS_OFFLINE_CONTEXT': [{
- 'parent_template': 'base1.html',
- }, {
- 'parent_template': 'base2.html',
- }]
+ "COMPRESS_OFFLINE_CONTEXT": [
+ {
+ "parent_template": "base1.html",
+ },
+ {
+ "parent_template": "base2.html",
+ },
+ ]
}
- expected_hash = ['cee48db7cedc', 'c877c436363a']
+ expected_hash = ["cee48db7cedc", "c877c436363a"]
class OfflineCompressTestCaseWithContextGeneratorImportError(
- OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_with_context'
+ OfflineTestCaseMixin, TestCase
+):
+ templates_dir = "test_with_context"
def _test_offline(self, engine, verbosity=0):
# Test that we are properly generating ImportError when
# COMPRESS_OFFLINE_CONTEXT looks like a function but can't be imported
# for whatever reason.
- with self.settings(
- COMPRESS_OFFLINE_CONTEXT='invalid_mod.invalid_func'):
+ with self.settings(COMPRESS_OFFLINE_CONTEXT="invalid_mod.invalid_func"):
# Path with invalid module name -- ImportError:
self.assertRaises(
- ImportError, CompressCommand().handle_inner, engines=[engine])
+ ImportError, CompressCommand().handle_inner, engines=[engine]
+ )
- with self.settings(COMPRESS_OFFLINE_CONTEXT='compressor'):
+ with self.settings(COMPRESS_OFFLINE_CONTEXT="compressor"):
# Valid module name only without function -- AttributeError:
self.assertRaises(
- ImportError, CompressCommand().handle_inner, engines=[engine])
+ ImportError, CompressCommand().handle_inner, engines=[engine]
+ )
with self.settings(
- COMPRESS_OFFLINE_CONTEXT='compressor.tests.invalid_function'):
+ COMPRESS_OFFLINE_CONTEXT="compressor.tests.invalid_function"
+ ):
# Path with invalid function name -- AttributeError:
self.assertRaises(
- ImportError, CompressCommand().handle_inner, engines=[engine])
+ ImportError, CompressCommand().handle_inner, engines=[engine]
+ )
- with self.settings(
- COMPRESS_OFFLINE_CONTEXT='compressor.tests.test_offline'):
+ with self.settings(COMPRESS_OFFLINE_CONTEXT="compressor.tests.test_offline"):
# Path without function attempts call on module -- TypeError:
self.assertRaises(
- ImportError, CompressCommand().handle_inner, engines=[engine])
+ ImportError, CompressCommand().handle_inner, engines=[engine]
+ )
- valid_path = 'compressor.tests.test_offline.offline_context_generator'
+ valid_path = "compressor.tests.test_offline.offline_context_generator"
with self.settings(COMPRESS_OFFLINE_CONTEXT=valid_path):
# Valid path to generator function -- no ImportError:
try:
- CompressCommand().handle_inner(
- engines=[engine], verbosity=verbosity)
+ CompressCommand().handle_inner(engines=[engine], verbosity=verbosity)
except ImportError:
- self.fail('Valid path to offline context generator must'
- ' not raise ImportError.')
+ self.fail(
+ "Valid path to offline context generator must"
+ " not raise ImportError."
+ )
class OfflineCompressTestCaseErrors(OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_error_handling'
+ templates_dir = "test_error_handling"
def _test_offline(self, engine, verbosity=0):
count, result = CompressCommand().handle_inner(
- engines=[engine], verbosity=verbosity)
+ engines=[engine], verbosity=verbosity
+ )
- if engine == 'django':
+ if engine == "django":
self.assertEqual(2, count)
else:
# Because we use env.parse in Jinja2Parser, the engine does not
@@ -626,18 +648,18 @@ class OfflineCompressTestCaseErrors(OfflineTestCaseMixin, TestCase):
# it is unable to detect that they are missing. So all the
# 'compress' nodes are processed correctly.
self.assertEqual(4, count)
- self.assertEqual(engine, 'jinja2')
- self.assertIn(self._render_link('187e2ce75808'), result)
- self.assertIn(self._render_link('fffafcdf428e'), result)
+ self.assertEqual(engine, "jinja2")
+ self.assertIn(self._render_link("187e2ce75808"), result)
+ self.assertIn(self._render_link("fffafcdf428e"), result)
- self.assertIn(self._render_script('eeabdac29232'), result)
- self.assertIn(self._render_script('9a7f06880ce3'), result)
+ self.assertIn(self._render_script("eeabdac29232"), result)
+ self.assertIn(self._render_script("9a7f06880ce3"), result)
class OfflineCompressTestCaseWithError(OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_error_handling'
+ templates_dir = "test_error_handling"
additional_test_settings = {
- 'COMPRESS_PRECOMPILERS': (('text/coffeescript', 'nonexisting-binary'),)
+ "COMPRESS_PRECOMPILERS": (("text/coffeescript", "nonexisting-binary"),)
}
def _test_offline(self, engine, verbosity=0):
@@ -648,40 +670,46 @@ class OfflineCompressTestCaseWithError(OfflineTestCaseMixin, TestCase):
"""
with self.settings(DEBUG=True):
self.assertRaises(
- CommandError, CompressCommand().handle_inner, engines=[engine],
- verbosity=verbosity)
+ CommandError,
+ CompressCommand().handle_inner,
+ engines=[engine],
+ verbosity=verbosity,
+ )
with self.settings(DEBUG=False):
self.assertRaises(
- CommandError, CompressCommand().handle_inner, engines=[engine],
- verbosity=verbosity)
+ CommandError,
+ CompressCommand().handle_inner,
+ engines=[engine],
+ verbosity=verbosity,
+ )
class OfflineCompressEmptyTag(OfflineTestCaseMixin, TestCase):
"""
- In case of a compress template tag with no content, an entry
- will be added to the manifest with an empty string as value.
- This test makes sure there is no recompression happening when
- compressor encounters such an emptystring in the manifest.
+ In case of a compress template tag with no content, an entry
+ will be added to the manifest with an empty string as value.
+ This test makes sure there is no recompression happening when
+ compressor encounters such an emptystring in the manifest.
"""
- templates_dir = 'basic'
- expected_hash = '822ac7501287'
+
+ templates_dir = "basic"
+ expected_hash = "822ac7501287"
def _test_offline(self, engine, verbosity=0):
CompressCommand().handle_inner(engines=[engine], verbosity=verbosity)
manifest = get_offline_manifest()
- manifest[list(manifest)[0]] = ''
- self.assertEqual(self._render_template(engine), '\n')
+ manifest[list(manifest)[0]] = ""
+ self.assertEqual(self._render_template(engine), "\n")
class OfflineCompressBlockSuperBaseCompressed(OfflineTestCaseMixin, TestCase):
- template_names = ['base.html', 'base2.html',
- 'test_compressor_offline.html']
- templates_dir = 'test_block_super_base_compressed'
- expected_hash_offline = ['e4e9263fa4c0', '9cecd41a505f', 'd3f749e83c81']
- expected_hash = ['028c3fc42232', '2e9d3f5545a6', 'd3f749e83c81']
+ template_names = ["base.html", "base2.html", "test_compressor_offline.html"]
+ templates_dir = "test_block_super_base_compressed"
+ expected_hash_offline = ["e4e9263fa4c0", "9cecd41a505f", "d3f749e83c81"]
+ expected_hash = ["028c3fc42232", "2e9d3f5545a6", "d3f749e83c81"]
# Block.super not supported for Jinja2 yet.
- engines = ('django',)
+ engines = ("django",)
def setUp(self):
super().setUp()
@@ -690,72 +718,78 @@ class OfflineCompressBlockSuperBaseCompressed(OfflineTestCaseMixin, TestCase):
self.templates = []
for template_name in self.template_names:
template_path = os.path.join(
- settings.TEMPLATES[0]['DIRS'][0], template_name)
+ settings.TEMPLATES[0]["DIRS"][0], template_name
+ )
self.template_paths.append(template_path)
- with io.open(template_path,
- encoding=self.CHARSET) as file_:
+ with io.open(template_path, encoding=self.CHARSET) as file_:
template = Template(file_.read())
self.templates.append(template)
def _render_template(self, template, engine):
- if engine == 'django':
+ if engine == "django":
return template.render(Context(settings.COMPRESS_OFFLINE_CONTEXT))
- elif engine == 'jinja2':
- return template.render(settings.COMPRESS_OFFLINE_CONTEXT) + '\n'
+ elif engine == "jinja2":
+ return template.render(settings.COMPRESS_OFFLINE_CONTEXT) + "\n"
else:
return None
def _test_offline(self, engine, verbosity=0):
count, result = CompressCommand().handle_inner(
- engines=[engine], verbosity=verbosity)
+ engines=[engine], verbosity=verbosity
+ )
self.assertEqual(len(self.expected_hash), count)
for expected_hash, template in zip(self.expected_hash_offline, self.templates):
expected = self._render_script(expected_hash)
self.assertIn(expected, result)
rendered_template = self._render_template(template, engine)
- self.assertEqual(
- rendered_template, self._render_result([expected]))
+ self.assertEqual(rendered_template, self._render_result([expected]))
class OfflineCompressInlineNonAsciiTestCase(OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_inline_non_ascii'
+ templates_dir = "test_inline_non_ascii"
additional_test_settings = {
- 'COMPRESS_OFFLINE_CONTEXT': {
- 'test_non_ascii_value': '\u2014',
+ "COMPRESS_OFFLINE_CONTEXT": {
+ "test_non_ascii_value": "\u2014",
}
}
def _test_offline(self, engine, verbosity=0):
_, result = CompressCommand().handle_inner(
- engines=[engine], verbosity=verbosity)
+ engines=[engine], verbosity=verbosity
+ )
rendered_template = self._render_template(engine)
- self.assertEqual(rendered_template, ''.join(result) + '\n')
+ self.assertEqual(rendered_template, "".join(result) + "\n")
class OfflineCompressComplexTestCase(OfflineTestCaseMixin, TestCase):
- templates_dir = 'test_complex'
+ templates_dir = "test_complex"
additional_test_settings = {
- 'COMPRESS_OFFLINE_CONTEXT': {
- 'condition': 'OK!',
+ "COMPRESS_OFFLINE_CONTEXT": {
+ "condition": "OK!",
# Django templating does not allow definition of tuples in the
# templates.
# Make sure this is same as test_templates_jinja2/test_complex.
- 'my_names': ('js/one.js', 'js/nonasc.js'),
+ "my_names": ("js/one.js", "js/nonasc.js"),
}
}
def _test_offline(self, engine, verbosity=0):
count, result = CompressCommand().handle_inner(
- engines=[engine], verbosity=verbosity)
+ engines=[engine], verbosity=verbosity
+ )
self.assertEqual(3, count)
- self.assertEqual([
- self._render_script('76a82cfab9ab'),
- self._render_script('7219642b8ab4'),
- self._render_script('567bb77b13db')
- ], result)
+ self.assertEqual(
+ [
+ self._render_script("76a82cfab9ab"),
+ self._render_script("7219642b8ab4"),
+ self._render_script("567bb77b13db"),
+ ],
+ result,
+ )
rendered_template = self._render_template(engine)
self.assertEqual(
- rendered_template, self._render_result([result[0], result[2]], ''))
+ rendered_template, self._render_result([result[0], result[2]], "")
+ )
class OfflineCompressExtendsRecursionTestCase(OfflineTestCaseMixin, TestCase):
@@ -763,30 +797,33 @@ class OfflineCompressExtendsRecursionTestCase(OfflineTestCaseMixin, TestCase):
Test that templates extending templates with the same name
(e.g. admin/index.html) don't cause an infinite test_extends_recursion
"""
- templates_dir = 'test_extends_recursion'
+
+ templates_dir = "test_extends_recursion"
INSTALLED_APPS = [
- 'django.contrib.admin',
- 'django.contrib.auth',
- 'django.contrib.contenttypes',
- 'django.contrib.staticfiles',
- 'compressor',
+ "django.contrib.admin",
+ "django.contrib.auth",
+ "django.contrib.contenttypes",
+ "django.contrib.staticfiles",
+ "compressor",
]
@override_settings(INSTALLED_APPS=INSTALLED_APPS)
def _test_offline(self, engine, verbosity=0):
- count, _ = CompressCommand().handle_inner(
- engines=[engine], verbosity=verbosity)
+ count, _ = CompressCommand().handle_inner(engines=[engine], verbosity=verbosity)
self.assertEqual(count, 1)
-class OfflineCompressExtendsRelativeTestCase(SuperMixin, OfflineTestCaseMixin, TestCase):
+class OfflineCompressExtendsRelativeTestCase(
+ SuperMixin, OfflineTestCaseMixin, TestCase
+):
"""
Test that templates extending templates using relative paths
(e.g. ./base.html) are evaluated correctly
"""
- templates_dir = 'test_extends_relative'
- expected_hash = '817b5defb197'
+
+ templates_dir = "test_extends_relative"
+ expected_hash = "817b5defb197"
class TestCompressCommand(OfflineTestCaseMixin, TestCase):
@@ -796,9 +833,7 @@ class TestCompressCommand(OfflineTestCaseMixin, TestCase):
raise SkipTest("Not utilized for this test case")
def _build_expected_manifest(self, expected):
- return {
- k: self._render_script(v) for k, v in expected.items()
- }
+ return {k: self._render_script(v) for k, v in expected.items()}
def test_multiple_engines(self):
opts = {
@@ -806,27 +841,38 @@ class TestCompressCommand(OfflineTestCaseMixin, TestCase):
"verbosity": 0,
}
- call_command('compress', engines=["django"], **opts)
+ call_command("compress", engines=["django"], **opts)
manifest_django = get_offline_manifest()
manifest_django_expected = self._build_expected_manifest(
- {'0fed9c02607acba22316a328075a81a74e0983ae79470daa9d3707a337623dc3': '0241107e9a9a'})
+ {
+ "0fed9c02607acba22316a328075a81a74e0983ae79470daa9d3707a337623dc3": "0241107e9a9a"
+ }
+ )
self.assertEqual(manifest_django, manifest_django_expected)
- call_command('compress', engines=["jinja2"], **opts)
+ call_command("compress", engines=["jinja2"], **opts)
manifest_jinja2 = get_offline_manifest()
manifest_jinja2_expected = self._build_expected_manifest(
- {'077408d23d4a829b8f88db2eadcf902b29d71b14f94018d900f38a3f8ed24c94': '5694ca83dd14'})
+ {
+ "077408d23d4a829b8f88db2eadcf902b29d71b14f94018d900f38a3f8ed24c94": "5694ca83dd14"
+ }
+ )
self.assertEqual(manifest_jinja2, manifest_jinja2_expected)
- call_command('compress', engines=["django", "jinja2"], **opts)
+ call_command("compress", engines=["django", "jinja2"], **opts)
manifest_both = get_offline_manifest()
manifest_both_expected = self._build_expected_manifest(
- {'0fed9c02607acba22316a328075a81a74e0983ae79470daa9d3707a337623dc3': '0241107e9a9a',
- '077408d23d4a829b8f88db2eadcf902b29d71b14f94018d900f38a3f8ed24c94': '5694ca83dd14'})
+ {
+ "0fed9c02607acba22316a328075a81a74e0983ae79470daa9d3707a337623dc3": "0241107e9a9a",
+ "077408d23d4a829b8f88db2eadcf902b29d71b14f94018d900f38a3f8ed24c94": "5694ca83dd14",
+ }
+ )
self.assertEqual(manifest_both, manifest_both_expected)
-class OfflineCompressTestCaseWithLazyStringAlikeUrls(OfflineCompressTestCaseWithContextGenerator):
+class OfflineCompressTestCaseWithLazyStringAlikeUrls(
+ OfflineCompressTestCaseWithContextGenerator
+):
"""
Test offline compressing with ``STATIC_URL`` and ``COMPRESS_URL`` as instances of
*lazy string-alike objects* instead of strings.
@@ -851,35 +897,35 @@ class OfflineCompressTestCaseWithLazyStringAlikeUrls(OfflineCompressTestCaseWith
correctly - e.g. ``some_string.replace(STATIC_URL, '...')``. So we need to do explicit
``str`` type cast: ``some_string.replace(str(STATIC_URL), '...')``.
"""
- templates_dir = 'test_static_templatetag'
+
+ templates_dir = "test_static_templatetag"
additional_test_settings = {
- 'STATIC_URL': LazyScriptNamePrefixedUrl('/static/'),
- 'COMPRESS_URL': LazyScriptNamePrefixedUrl('/static/'),
+ "STATIC_URL": LazyScriptNamePrefixedUrl("/static/"),
+ "COMPRESS_URL": LazyScriptNamePrefixedUrl("/static/"),
# We use ``COMPRESS_OFFLINE_CONTEXT`` generator to make sure that
# ``STATIC_URL`` is not cached when rendering the template.
- 'COMPRESS_OFFLINE_CONTEXT': (
- 'compressor.tests.test_offline.static_url_context_generator'
- )
+ "COMPRESS_OFFLINE_CONTEXT": (
+ "compressor.tests.test_offline.static_url_context_generator"
+ ),
}
- expected_hash = 'be0b1eade28b'
+ expected_hash = "be0b1eade28b"
def _test_offline(self, engine, verbosity=0):
count, result = CompressCommand().handle_inner(
- engines=[engine], verbosity=verbosity)
+ engines=[engine], verbosity=verbosity
+ )
self.assertEqual(1, count)
# Change ``SCRIPT_NAME`` WSGI param - it can be changed on every HTTP request,
# e.g. passed via HTTP header.
- for script_name in ['', '/app/prefix/', '/another/prefix/']:
+ for script_name in ["", "/app/prefix/", "/another/prefix/"]:
with script_prefix(script_name):
self.assertEqual(
- str(settings.STATIC_URL),
- script_name.rstrip('/') + '/static/'
+ str(settings.STATIC_URL), script_name.rstrip("/") + "/static/"
)
self.assertEqual(
- str(settings.COMPRESS_URL),
- script_name.rstrip('/') + '/static/'
+ str(settings.COMPRESS_URL), script_name.rstrip("/") + "/static/"
)
expected_result = self._render_result(result)
diff --git a/compressor/tests/test_parsers.py b/compressor/tests/test_parsers.py
index 5bc9961..e5266d9 100644
--- a/compressor/tests/test_parsers.py
+++ b/compressor/tests/test_parsers.py
@@ -28,14 +28,14 @@ class ParserTestCase:
self.override_settings.__exit__(None, None, None)
-@unittest.skipIf(lxml is None, 'lxml not found')
+@unittest.skipIf(lxml is None, "lxml not found")
class LxmlParserTests(ParserTestCase, CompressorTestCase):
- parser_cls = 'compressor.parser.LxmlParser'
+ parser_cls = "compressor.parser.LxmlParser"
-@unittest.skipIf(html5lib is None, 'html5lib not found')
+@unittest.skipIf(html5lib is None, "html5lib not found")
class Html5LibParserTests(ParserTestCase, CompressorTestCase):
- parser_cls = 'compressor.parser.Html5LibParser'
+ parser_cls = "compressor.parser.Html5LibParser"
# Special test variants required since xml.etree holds attributes
# as a plain dictionary, e.g. key order is unpredictable.
@@ -43,57 +43,54 @@ class Html5LibParserTests(ParserTestCase, CompressorTestCase):
split = self.css_node.split_contents()
out0 = (
SOURCE_FILE,
- os.path.join(settings.COMPRESS_ROOT, 'css', 'one.css'),
- 'css/one.css',
- '{http://www.w3.org/1999/xhtml}link',
- {'rel': 'stylesheet', 'href': '/static/css/one.css',
- 'type': 'text/css'},
+ os.path.join(settings.COMPRESS_ROOT, "css", "one.css"),
+ "css/one.css",
+ "{http://www.w3.org/1999/xhtml}link",
+ {"rel": "stylesheet", "href": "/static/css/one.css", "type": "text/css"},
)
- self.assertEqual(out0, split[0][:3] + (split[0][3].tag,
- split[0][3].attrib))
+ self.assertEqual(out0, split[0][:3] + (split[0][3].tag, split[0][3].attrib))
out1 = (
SOURCE_HUNK,
- 'p { border:5px solid green;}',
+ "p { border:5px solid green;}",
None,
'<style type="text/css">p { border:5px solid green;}</style>',
)
- self.assertEqual(out1, split[1][:3]
- + (self.css_node.parser.elem_str(split[1][3]),))
+ self.assertEqual(
+ out1, split[1][:3] + (self.css_node.parser.elem_str(split[1][3]),)
+ )
out2 = (
SOURCE_FILE,
- os.path.join(settings.COMPRESS_ROOT, 'css', 'two.css'),
- 'css/two.css',
- '{http://www.w3.org/1999/xhtml}link',
- {'rel': 'stylesheet', 'href': '/static/css/two.css',
- 'type': 'text/css'},
+ os.path.join(settings.COMPRESS_ROOT, "css", "two.css"),
+ "css/two.css",
+ "{http://www.w3.org/1999/xhtml}link",
+ {"rel": "stylesheet", "href": "/static/css/two.css", "type": "text/css"},
)
- self.assertEqual(out2, split[2][:3] + (split[2][3].tag,
- split[2][3].attrib))
+ self.assertEqual(out2, split[2][:3] + (split[2][3].tag, split[2][3].attrib))
def test_js_split(self):
split = self.js_node.split_contents()
out0 = (
SOURCE_FILE,
- os.path.join(settings.COMPRESS_ROOT, 'js', 'one.js'),
- 'js/one.js',
- '{http://www.w3.org/1999/xhtml}script',
- {'src': '/static/js/one.js', 'type': 'text/javascript'},
+ os.path.join(settings.COMPRESS_ROOT, "js", "one.js"),
+ "js/one.js",
+ "{http://www.w3.org/1999/xhtml}script",
+ {"src": "/static/js/one.js", "type": "text/javascript"},
None,
)
- self.assertEqual(out0, split[0][:3] + (split[0][3].tag,
- split[0][3].attrib,
- split[0][3].text))
+ self.assertEqual(
+ out0, split[0][:3] + (split[0][3].tag, split[0][3].attrib, split[0][3].text)
+ )
out1 = (
SOURCE_HUNK,
'obj.value = "value";',
None,
- '{http://www.w3.org/1999/xhtml}script',
- {'type': 'text/javascript'},
+ "{http://www.w3.org/1999/xhtml}script",
+ {"type": "text/javascript"},
'obj.value = "value";',
)
- self.assertEqual(out1, split[1][:3] + (split[1][3].tag,
- split[1][3].attrib,
- split[1][3].text))
+ self.assertEqual(
+ out1, split[1][:3] + (split[1][3].tag, split[1][3].attrib, split[1][3].text)
+ )
@override_settings(COMPRESS_ENABLED=False)
def test_css_return_if_off(self):
@@ -111,7 +108,7 @@ class Html5LibParserTests(ParserTestCase, CompressorTestCase):
class BeautifulSoupParserTests(ParserTestCase, CompressorTestCase):
- parser_cls = 'compressor.parser.BeautifulSoupParser'
+ parser_cls = "compressor.parser.BeautifulSoupParser"
# just like in the Html5LibParserTests, provide special tests because
# in bs4 attributes are held in dictionaries
@@ -119,30 +116,29 @@ class BeautifulSoupParserTests(ParserTestCase, CompressorTestCase):
split = self.css_node.split_contents()
out0 = (
SOURCE_FILE,
- os.path.join(settings.COMPRESS_ROOT, 'css', 'one.css'),
- 'css/one.css',
+ os.path.join(settings.COMPRESS_ROOT, "css", "one.css"),
+ "css/one.css",
None,
None,
)
- self.assertEqual(out0, split[0][:3] + (split[0][3].tag,
- split[0][3].attrib))
+ self.assertEqual(out0, split[0][:3] + (split[0][3].tag, split[0][3].attrib))
out1 = (
SOURCE_HUNK,
- 'p { border:5px solid green;}',
+ "p { border:5px solid green;}",
None,
'<style type="text/css">p { border:5px solid green;}</style>',
)
- self.assertEqual(out1, split[1][:3]
- + (self.css_node.parser.elem_str(split[1][3]),))
+ self.assertEqual(
+ out1, split[1][:3] + (self.css_node.parser.elem_str(split[1][3]),)
+ )
out2 = (
SOURCE_FILE,
- os.path.join(settings.COMPRESS_ROOT, 'css', 'two.css'),
- 'css/two.css',
+ os.path.join(settings.COMPRESS_ROOT, "css", "two.css"),
+ "css/two.css",
None,
None,
)
- self.assertEqual(out2, split[2][:3] + (split[2][3].tag,
- split[2][3].attrib))
+ self.assertEqual(out2, split[2][:3] + (split[2][3].tag, split[2][3].attrib))
@override_settings(COMPRESS_ENABLED=False)
def test_css_return_if_off(self):
@@ -150,4 +146,4 @@ class BeautifulSoupParserTests(ParserTestCase, CompressorTestCase):
class HtmlParserTests(ParserTestCase, CompressorTestCase):
- parser_cls = 'compressor.parser.HtmlParser'
+ parser_cls = "compressor.parser.HtmlParser"
diff --git a/compressor/tests/test_sekizai.py b/compressor/tests/test_sekizai.py
index 8273609..ee736ba 100644
--- a/compressor/tests/test_sekizai.py
+++ b/compressor/tests/test_sekizai.py
@@ -7,33 +7,38 @@ class TestSekizaiCompressorExtension(TestCase):
"""
Test case for Sekizai extension.
"""
+
def test_postprocess_js(self):
- template_string = '''
+ template_string = """
{% load static compress sekizai_tags %}
{% addtoblock "js" %}<script src="{% static 'js/one.js' %}" type="text/javascript"></script>{% endaddtoblock %}
{% addtoblock "js" %}<script async="async" defer="defer" src="https://maps.googleapis.com/maps/api/js?key={{ apiKey }}"></script>{% endaddtoblock %}
{% addtoblock "js" %}<script src="{% static 'js/two.js' %}" type="text/javascript"></script>{% endaddtoblock %}
{% addtoblock "js" %}<script src="https://code.jquery.com/jquery-3.3.1.min.js" type="text/javascript"></script>{% endaddtoblock %}
{% addtoblock "js" %}<script src="{% static 'js/three.js' %}" type="text/javascript"></script>{% endaddtoblock %}
-{% render_block "js" postprocessor "compressor.contrib.sekizai.compress" %}'''
+{% render_block "js" postprocessor "compressor.contrib.sekizai.compress" %}"""
template = Template(template_string)
- context = SekizaiContext({'apiKey': 'XYZ'})
+ context = SekizaiContext({"apiKey": "XYZ"})
html = template.render(context).strip()
- self.assertEqual(html,
-'''<script src="https://code.jquery.com/jquery-3.3.1.min.js" type="text/javascript"></script>
+ self.assertEqual(
+ html,
+ """<script src="https://code.jquery.com/jquery-3.3.1.min.js" type="text/javascript"></script>
<script src="/static/CACHE/js/output.e682d84f6b17.js"></script>
-<script async="async" defer="defer" src="https://maps.googleapis.com/maps/api/js?key=XYZ"></script>''')
+<script async="async" defer="defer" src="https://maps.googleapis.com/maps/api/js?key=XYZ"></script>""",
+ )
def test_postprocess_css(self):
- template_string = '''
+ template_string = """
{% load static compress sekizai_tags %}
{% addtoblock "css" %}<link href="{% static 'css/one.css' %}" rel="stylesheet" type="text/css" />{% endaddtoblock %}
{% addtoblock "css" %}<link href="https://cdnjs.cloudflare.com/ajax/libs/select2/4.0.5/css/select2.min.css" rel="stylesheet" type="text/css" />{% endaddtoblock %}
{% addtoblock "css" %}<link href="{% static 'css/two.css' %}" rel="stylesheet" type="text/css" />{% endaddtoblock %}
-{% render_block "css" postprocessor "compressor.contrib.sekizai.compress" %}'''
+{% render_block "css" postprocessor "compressor.contrib.sekizai.compress" %}"""
template = Template(template_string)
context = SekizaiContext()
html = template.render(context).strip()
- self.assertEqual(html,
-'''<link href="https://cdnjs.cloudflare.com/ajax/libs/select2/4.0.5/css/select2.min.css" rel="stylesheet" type="text/css">
-<link rel="stylesheet" href="/static/CACHE/css/output.44f040b05f91.css" type="text/css">''')
+ self.assertEqual(
+ html,
+ """<link href="https://cdnjs.cloudflare.com/ajax/libs/select2/4.0.5/css/select2.min.css" rel="stylesheet" type="text/css">
+<link rel="stylesheet" href="/static/CACHE/css/output.44f040b05f91.css" type="text/css">""",
+ )
diff --git a/compressor/tests/test_signals.py b/compressor/tests/test_signals.py
index 8cc05b9..5e26714 100644
--- a/compressor/tests/test_signals.py
+++ b/compressor/tests/test_signals.py
@@ -8,9 +8,7 @@ from compressor.signals import post_compress
@override_settings(
- COMPRESS_ENABLED=True,
- COMPRESS_PRECOMPILERS=(),
- COMPRESS_DEBUG_TOGGLE='nocompress'
+ COMPRESS_ENABLED=True, COMPRESS_PRECOMPILERS=(), COMPRESS_DEBUG_TOGGLE="nocompress"
)
class PostCompressSignalTestCase(TestCase):
def setUp(self):
@@ -18,12 +16,12 @@ class PostCompressSignalTestCase(TestCase):
<link rel="stylesheet" href="/static/css/one.css" type="text/css">
<style type="text/css">p { border:5px solid green;}</style>
<link rel="stylesheet" href="/static/css/two.css" type="text/css">"""
- self.css_node = CssCompressor('css', self.css)
+ self.css_node = CssCompressor("css", self.css)
self.js = """\
<script src="/static/js/one.js" type="text/javascript"></script>
<script type="text/javascript">obj.value = "value";</script>"""
- self.js_node = JsCompressor('js', self.js)
+ self.js_node = JsCompressor("js", self.js)
def tearDown(self):
post_compress.disconnect()
@@ -31,38 +29,41 @@ class PostCompressSignalTestCase(TestCase):
def test_js_signal_sent(self):
def listener(sender, **kwargs):
pass
+
callback = Mock(wraps=listener)
post_compress.connect(callback)
self.js_node.output()
args, kwargs = callback.call_args
- self.assertEqual(JsCompressor, kwargs['sender'])
- self.assertEqual('js', kwargs['type'])
- self.assertEqual('file', kwargs['mode'])
- context = kwargs['context']
- assert 'url' in context['compressed']
+ self.assertEqual(JsCompressor, kwargs["sender"])
+ self.assertEqual("js", kwargs["type"])
+ self.assertEqual("file", kwargs["mode"])
+ context = kwargs["context"]
+ assert "url" in context["compressed"]
def test_css_signal_sent(self):
def listener(sender, **kwargs):
pass
+
callback = Mock(wraps=listener)
post_compress.connect(callback)
self.css_node.output()
args, kwargs = callback.call_args
- self.assertEqual(CssCompressor, kwargs['sender'])
- self.assertEqual('css', kwargs['type'])
- self.assertEqual('file', kwargs['mode'])
- context = kwargs['context']
- assert 'url' in context['compressed']
+ self.assertEqual(CssCompressor, kwargs["sender"])
+ self.assertEqual("css", kwargs["type"])
+ self.assertEqual("file", kwargs["mode"])
+ context = kwargs["context"]
+ assert "url" in context["compressed"]
def test_css_signal_multiple_media_attributes(self):
css = """\
<link rel="stylesheet" href="/static/css/one.css" media="handheld" type="text/css" />
<style type="text/css" media="print">p { border:5px solid green;}</style>
<link rel="stylesheet" href="/static/css/two.css" type="text/css">"""
- css_node = CssCompressor('css', css)
+ css_node = CssCompressor("css", css)
def listener(sender, **kwargs):
pass
+
callback = Mock(wraps=listener)
post_compress.connect(callback)
css_node.output()
diff --git a/compressor/tests/test_storages.py b/compressor/tests/test_storages.py
index 112c250..9ad711c 100644
--- a/compressor/tests/test_storages.py
+++ b/compressor/tests/test_storages.py
@@ -15,12 +15,16 @@ from compressor.tests.test_templatetags import render
class GzipStorage(LazyObject):
def _setup(self):
- self._wrapped = get_storage_class('compressor.storage.GzipCompressorFileStorage')()
+ self._wrapped = get_storage_class(
+ "compressor.storage.GzipCompressorFileStorage"
+ )()
class BrotliStorage(LazyObject):
def _setup(self):
- self._wrapped = get_storage_class('compressor.storage.BrotliCompressorFileStorage')()
+ self._wrapped = get_storage_class(
+ "compressor.storage.BrotliCompressorFileStorage"
+ )()
@override_settings(COMPRESS_ENABLED=True)
@@ -34,20 +38,28 @@ class StorageTestCase(TestCase):
storage.default_storage = self.default_storage
def test_gzip_storage(self):
- storage.default_storage.save('test.txt', ContentFile('yeah yeah'))
- self.assertTrue(os.path.exists(os.path.join(settings.COMPRESS_ROOT, 'test.txt')))
- self.assertTrue(os.path.exists(os.path.join(settings.COMPRESS_ROOT, 'test.txt.gz')))
+ storage.default_storage.save("test.txt", ContentFile("yeah yeah"))
+ self.assertTrue(
+ os.path.exists(os.path.join(settings.COMPRESS_ROOT, "test.txt"))
+ )
+ self.assertTrue(
+ os.path.exists(os.path.join(settings.COMPRESS_ROOT, "test.txt.gz"))
+ )
def test_brotli_storage(self):
- payload = ','.join([str(i) for i in range(1000)]).encode()
+ payload = ",".join([str(i) for i in range(1000)]).encode()
chunk_size = 1024
- storage.brotli_storage.save('test.txt', ContentFile(payload))
- self.assertTrue(os.path.exists(os.path.join(settings.COMPRESS_ROOT, 'test.txt')))
- self.assertTrue(os.path.exists(os.path.join(settings.COMPRESS_ROOT, 'test.txt.br')))
- decompressed_data = b''
+ storage.brotli_storage.save("test.txt", ContentFile(payload))
+ self.assertTrue(
+ os.path.exists(os.path.join(settings.COMPRESS_ROOT, "test.txt"))
+ )
+ self.assertTrue(
+ os.path.exists(os.path.join(settings.COMPRESS_ROOT, "test.txt.br"))
+ )
+ decompressed_data = b""
br_decompressor = brotli.Decompressor()
- with open(os.path.join(settings.COMPRESS_ROOT, 'test.txt.br'), 'rb') as f:
- for data in iter(lambda: f.read(chunk_size), b''):
+ with open(os.path.join(settings.COMPRESS_ROOT, "test.txt.br"), "rb") as f:
+ for data in iter(lambda: f.read(chunk_size), b""):
decompressed_data += br_decompressor.process(data)
self.assertEqual(payload, decompressed_data)
@@ -58,18 +70,22 @@ class StorageTestCase(TestCase):
<link rel="stylesheet" href="{{ STATIC_URL }}css/two.css" type="text/css">
{% endcompress %}
"""
- context = {'STATIC_URL': settings.COMPRESS_URL}
+ context = {"STATIC_URL": settings.COMPRESS_URL}
out = css_tag("/static/CACHE/css/output.e701f86c6430.css")
self.assertEqual(out, render(template, context))
def test_duplicate_save_overwrites_same_file(self):
- filename1 = self.default_storage.save('test.txt', ContentFile('yeah yeah'))
- filename2 = self.default_storage.save('test.txt', ContentFile('yeah yeah'))
+ filename1 = self.default_storage.save("test.txt", ContentFile("yeah yeah"))
+ filename2 = self.default_storage.save("test.txt", ContentFile("yeah yeah"))
self.assertEqual(filename1, filename2)
self.assertNotIn("_", filename2)
def test_offline_manifest_storage(self):
- storage.default_offline_manifest_storage.save('test.txt', ContentFile('yeah yeah'))
- self.assertTrue(os.path.exists(os.path.join(settings.COMPRESS_ROOT, 'CACHE', 'test.txt')))
+ storage.default_offline_manifest_storage.save(
+ "test.txt", ContentFile("yeah yeah")
+ )
+ self.assertTrue(
+ os.path.exists(os.path.join(settings.COMPRESS_ROOT, "CACHE", "test.txt"))
+ )
# Check that the file is stored at the same default location as before the new manifest storage.
- self.assertTrue(self.default_storage.exists(os.path.join('CACHE', 'test.txt')))
+ self.assertTrue(self.default_storage.exists(os.path.join("CACHE", "test.txt")))
diff --git a/compressor/tests/test_templatetags.py b/compressor/tests/test_templatetags.py
index 4bd4418..fec13c5 100644
--- a/compressor/tests/test_templatetags.py
+++ b/compressor/tests/test_templatetags.py
@@ -27,12 +27,12 @@ def render(template_string, context_dict=None, context=None):
@override_settings(COMPRESS_ENABLED=True)
class TemplatetagTestCase(TestCase):
def setUp(self):
- self.context = {'STATIC_URL': settings.COMPRESS_URL}
+ self.context = {"STATIC_URL": settings.COMPRESS_URL}
def test_empty_tag(self):
template = """{% load compress %}{% compress js %}{% block js %}
{% endblock %}{% endcompress %}"""
- self.assertEqual('', render(template, self.context))
+ self.assertEqual("", render(template, self.context))
def test_css_tag(self):
template = """{% load compress %}{% compress css %}
@@ -120,7 +120,7 @@ class TemplatetagTestCase(TestCase):
{% endcompress %}"""
self.assertRaises(TemplateSyntaxError, render, template, {})
- @override_settings(COMPRESS_DEBUG_TOGGLE='togglecompress')
+ @override_settings(COMPRESS_DEBUG_TOGGLE="togglecompress")
def test_debug_toggle(self):
template = """{% load compress %}{% compress js %}
<script src="{{ STATIC_URL }}js/one.js" type="text/javascript"></script>
@@ -129,7 +129,7 @@ class TemplatetagTestCase(TestCase):
"""
class MockDebugRequest:
- GET = {settings.COMPRESS_DEBUG_TOGGLE: 'true'}
+ GET = {settings.COMPRESS_DEBUG_TOGGLE: "true"}
context = dict(self.context, request=MockDebugRequest())
out = """<script src="/static/js/one.js" type="text/javascript"></script>
@@ -146,9 +146,11 @@ class TemplatetagTestCase(TestCase):
<link rel="stylesheet" href="{{ STATIC_URL }}css/two.css" type="text/css">
{% endcompress %}"""
out_js = '<script>obj={};;obj.value="value";;</script>'
- out_css = ('<style type="text/css">body{background:#990}'
- 'p{border:5px solid green}'
- 'body{color:#fff}</style>')
+ out_css = (
+ '<style type="text/css">body{background:#990}'
+ "p{border:5px solid green}"
+ "body{color:#fff}</style>"
+ )
self.assertEqual(out_js + out_css, render(template, self.context))
def test_named_compress_tag(self):
@@ -159,12 +161,13 @@ class TemplatetagTestCase(TestCase):
def listener(sender, **kwargs):
pass
+
callback = Mock(wraps=listener)
post_compress.connect(callback)
render(template)
args, kwargs = callback.call_args
- context = kwargs['context']
- self.assertEqual('foo', context['compressed']['name'])
+ context = kwargs["context"]
+ self.assertEqual("foo", context["compressed"]["name"])
def test_sekizai_only_once(self):
template = """{% load sekizai_tags %}{% addtoblock "js" %}
@@ -176,22 +179,21 @@ class TemplatetagTestCase(TestCase):
class PrecompilerTemplatetagTestCase(TestCase):
-
def setUp(self):
- precompiler = os.path.join(test_dir, 'precompiler.py')
+ precompiler = os.path.join(test_dir, "precompiler.py")
python = sys.executable
override_settings = {
- 'COMPRESS_ENABLED': True,
- 'COMPRESS_PRECOMPILERS': (
- ('text/coffeescript', '%s %s' % (python, precompiler)),
- ('text/less', '%s %s' % (python, precompiler)),
- )
+ "COMPRESS_ENABLED": True,
+ "COMPRESS_PRECOMPILERS": (
+ ("text/coffeescript", "%s %s" % (python, precompiler)),
+ ("text/less", "%s %s" % (python, precompiler)),
+ ),
}
self.override_settings = self.settings(**override_settings)
self.override_settings.__enter__()
- self.context = {'STATIC_URL': settings.COMPRESS_URL}
+ self.context = {"STATIC_URL": settings.COMPRESS_URL}
def tearDown(self):
self.override_settings.__exit__(None, None, None)
@@ -217,8 +219,11 @@ class PrecompilerTemplatetagTestCase(TestCase):
<script type="text/coffeescript"># this is a comment.</script>
<script type="text/javascript"># this too is a comment.</script>
{% endcompress %}"""
- out = (script('# this is a comment.\n') + '\n'
- + script('# this too is a comment.', scripttype="text/javascript"))
+ out = (
+ script("# this is a comment.\n")
+ + "\n"
+ + script("# this too is a comment.", scripttype="text/javascript")
+ )
self.assertEqual(out, render(template, self.context))
@override_settings(COMPRESS_ENABLED=False)
@@ -251,9 +256,13 @@ class PrecompilerTemplatetagTestCase(TestCase):
</script>
{% endcompress %}"""
- out = '\n'.join([script(src="/static/CACHE/js/one.4b3570601b8c.js"),
- script(scripttype="", src="/static/js/one.js"),
- script(src="/static/CACHE/js/one.8ab93aace8fa.js")])
+ out = "\n".join(
+ [
+ script(src="/static/CACHE/js/one.4b3570601b8c.js"),
+ script(scripttype="", src="/static/js/one.js"),
+ script(src="/static/CACHE/js/one.8ab93aace8fa.js"),
+ ]
+ )
self.assertEqual(out, render(template, self.context))
@@ -266,8 +275,12 @@ class PrecompilerTemplatetagTestCase(TestCase):
<link rel="stylesheet" type="text/css" href="{{ STATIC_URL }}css/two.css"></link>
{% endcompress %}"""
- out = ''.join(['<link rel="stylesheet" type="text/css" href="/static/css/one.css">',
- '<link rel="stylesheet" type="text/css" href="/static/css/two.css">'])
+ out = "".join(
+ [
+ '<link rel="stylesheet" type="text/css" href="/static/css/one.css">',
+ '<link rel="stylesheet" type="text/css" href="/static/css/two.css">',
+ ]
+ )
self.assertEqual(out, render(template, self.context))
@@ -281,9 +294,13 @@ class PrecompilerTemplatetagTestCase(TestCase):
<link rel="stylesheet" type="text/less" href="{{ STATIC_URL }}css/url/test.css"/>
{% endcompress %}"""
- out = ''.join(['<link rel="stylesheet" type="text/css" href="/static/css/one.css">',
- '<link rel="stylesheet" type="text/css" href="/static/css/two.css">',
- '<link rel="stylesheet" href="/static/CACHE/css/test.222f958fb191.css" type="text/css">'])
+ out = "".join(
+ [
+ '<link rel="stylesheet" type="text/css" href="/static/css/one.css">',
+ '<link rel="stylesheet" type="text/css" href="/static/css/two.css">',
+ '<link rel="stylesheet" href="/static/CACHE/css/test.222f958fb191.css" type="text/css">',
+ ]
+ )
self.assertEqual(out, render(template, self.context))
@@ -294,9 +311,9 @@ def script(content="", src="", scripttype=""):
>>> script('#this is a comment', scripttype="text/applescript")
'<script type="text/applescript">#this is a comment</script>'
"""
- out_script = '<script '
+ out_script = "<script "
if scripttype:
out_script += 'type="%s" ' % scripttype
if src:
out_script += 'src="%s" ' % src
- return out_script[:-1] + '>%s</script>' % content
+ return out_script[:-1] + ">%s</script>" % content
diff --git a/compressor/tests/test_utils.py b/compressor/tests/test_utils.py
index 1e8346f..78b11de 100644
--- a/compressor/tests/test_utils.py
+++ b/compressor/tests/test_utils.py
@@ -12,29 +12,30 @@ from imp import reload
def get_apps_without_staticfiles(apps):
- return [x for x in apps if x != 'django.contrib.staticfiles']
+ return [x for x in apps if x != "django.contrib.staticfiles"]
def get_apps_with_staticfiles_using_appconfig(apps):
return get_apps_without_staticfiles(apps) + [
- 'django.contrib.staticfiles.apps.StaticFilesConfig',
+ "django.contrib.staticfiles.apps.StaticFilesConfig",
]
class StaticFilesTestCase(TestCase):
-
def test_has_finders_from_staticfiles(self):
- self.assertTrue(compressor.utils.staticfiles.finders is
- django.contrib.staticfiles.finders)
+ self.assertTrue(
+ compressor.utils.staticfiles.finders is django.contrib.staticfiles.finders
+ )
def test_has_finders_from_staticfiles_if_configured_per_appconfig(self):
- apps = get_apps_with_staticfiles_using_appconfig(
- settings.INSTALLED_APPS)
+ apps = get_apps_with_staticfiles_using_appconfig(settings.INSTALLED_APPS)
try:
with override_settings(INSTALLED_APPS=apps):
reload(compressor.utils.staticfiles)
- self.assertTrue(compressor.utils.staticfiles.finders is
- django.contrib.staticfiles.finders)
+ self.assertTrue(
+ compressor.utils.staticfiles.finders
+ is django.contrib.staticfiles.finders
+ )
finally:
reload(compressor.utils.staticfiles)
@@ -49,10 +50,13 @@ class StaticFilesTestCase(TestCase):
class TestGetClass(TestCase):
-
def test_get_class_import_exception(self):
with self.assertRaises(FilterError) as context:
- get_class('common.uglify.JsUglifySourcemapCompressor')
-
- self.assertTrue(('Failed to import common.uglify.JsUglifySourcemapCompressor. '
- 'ImportError is: No module named' in str(context.exception)))
+ get_class("common.uglify.JsUglifySourcemapCompressor")
+
+ self.assertTrue(
+ (
+ "Failed to import common.uglify.JsUglifySourcemapCompressor. "
+ "ImportError is: No module named" in str(context.exception)
+ )
+ )
diff --git a/compressor/utils/__init__.py b/compressor/utils/__init__.py
index 8cf6863..fc19fc1 100644
--- a/compressor/utils/__init__.py
+++ b/compressor/utils/__init__.py
@@ -7,16 +7,20 @@ def get_class(class_string, exception=FilterError):
"""
Convert a string version of a function name to the callable object.
"""
- if not hasattr(class_string, '__bases__'):
+ if not hasattr(class_string, "__bases__"):
try:
class_string = str(class_string)
mod_name, class_name = get_mod_func(class_string)
if class_name:
- return getattr(__import__(mod_name, {}, {}, [str('')]), class_name)
+ return getattr(__import__(mod_name, {}, {}, [str("")]), class_name)
except AttributeError as e:
- raise exception('Failed to import %s. AttributeError is: %s' % (class_string, e))
+ raise exception(
+ "Failed to import %s. AttributeError is: %s" % (class_string, e)
+ )
except ImportError as e:
- raise exception('Failed to import %s. ImportError is: %s' % (class_string, e))
+ raise exception(
+ "Failed to import %s. ImportError is: %s" % (class_string, e)
+ )
raise exception("Invalid class path '%s'" % class_string)
@@ -27,10 +31,10 @@ def get_mod_func(callback):
('django.views.news.stories', 'story_detail')
"""
try:
- dot = callback.rindex('.')
+ dot = callback.rindex(".")
except ValueError:
- return callback, ''
- return callback[:dot], callback[dot + 1:]
+ return callback, ""
+ return callback[:dot], callback[dot + 1 :]
def get_pathext(default_pathext=None):
@@ -38,5 +42,5 @@ def get_pathext(default_pathext=None):
Returns the path extensions from environment or a default
"""
if default_pathext is None:
- default_pathext = os.pathsep.join(['.COM', '.EXE', '.BAT', '.CMD'])
- return os.environ.get('PATHEXT', default_pathext)
+ default_pathext = os.pathsep.join([".COM", ".EXE", ".BAT", ".CMD"])
+ return os.environ.get("PATHEXT", default_pathext)
diff --git a/compressor/utils/staticfiles.py b/compressor/utils/staticfiles.py
index d5e951d..760369f 100644
--- a/compressor/utils/staticfiles.py
+++ b/compressor/utils/staticfiles.py
@@ -7,11 +7,11 @@ from compressor.conf import settings
if apps.is_installed("django.contrib.staticfiles"):
from django.contrib.staticfiles import finders # noqa
- if ("compressor.finders.CompressorFinder"
- not in settings.STATICFILES_FINDERS):
+ if "compressor.finders.CompressorFinder" not in settings.STATICFILES_FINDERS:
raise ImproperlyConfigured(
"When using Django Compressor together with staticfiles, "
"please add 'compressor.finders.CompressorFinder' to the "
- "STATICFILES_FINDERS setting.")
+ "STATICFILES_FINDERS setting."
+ )
else:
finders = None # noqa