Merge branch 'release/0.9'
.gitignore (vendored, 4 changed lines)
@@ -1,7 +1,7 @@
build
compressor/tests/media/CACHE
compressor/tests/media/custom
compressor/tests/media/js/3f33b9146e12.js
compressor/tests/media/js/066cd253eada.js
dist
MANIFEST
*.pyc
@@ -9,3 +9,5 @@ MANIFEST
.tox/
*.egg
docs/_build/
.coverage
htmlcov

AUTHORS (1 changed line)
@@ -17,6 +17,7 @@ David Ziegler
Eugene Mirotin
Fenn Bailey
Gert Van Gool
Harro van der Klauw
Jaap Roes
Jason Davies
Jeremy Dunck

@@ -1,4 +1,4 @@
VERSION = (0, 8, 0, "f", 0) # following PEP 386
VERSION = (0, 9, 0, "f", 0) # following PEP 386
DEV_N = None

@@ -1,5 +1,4 @@
import os
import socket

from django.core.files.base import ContentFile
from django.template.loader import render_to_string
@@ -10,7 +9,12 @@ from compressor.exceptions import CompressorError, UncompressableFileError
from compressor.filters import CompilerFilter
from compressor.storage import default_storage
from compressor.utils import get_class, staticfiles
from compressor.utils.cache import cached_property
from compressor.utils.decorators import cached_property

# Some constants for nicer handling.
SOURCE_HUNK, SOURCE_FILE = 1, 2
METHOD_INPUT, METHOD_OUTPUT = 'input', 'output'


class Compressor(object):
    """
@@ -55,10 +59,8 @@ class Compressor(object):
        if settings.DEBUG and self.finders:
            filename = self.finders.find(basename)
        # secondly try finding the file in the root
        else:
            root_filename = os.path.join(settings.COMPRESS_ROOT, basename)
            if os.path.exists(root_filename):
                filename = root_filename
        elif self.storage.exists(basename):
            filename = self.storage.path(basename)
        if filename:
            return filename
        # or just raise an exception as the last resort
@@ -79,22 +81,21 @@ class Compressor(object):
    def mtimes(self):
        return [str(get_mtime(value))
                for kind, value, basename, elem in self.split_contents()
                if kind == 'file']
                if kind == SOURCE_FILE]

    @cached_property
    def cachekey(self):
        key = get_hexdigest(''.join(
        return get_hexdigest(''.join(
            [self.content] + self.mtimes).encode(self.charset), 12)
        return "django_compressor.%s.%s" % (socket.gethostname(), key)

    @cached_property
    def hunks(self):
        for kind, value, basename, elem in self.split_contents():
            if kind == "hunk":
                content = self.filter(value, "input",
            if kind == SOURCE_HUNK:
                content = self.filter(value, METHOD_INPUT,
                    elem=elem, kind=kind, basename=basename)
                yield unicode(content)
            elif kind == "file":
            elif kind == SOURCE_FILE:
                content = ""
                fd = open(value, 'rb')
                try:
@@ -104,7 +105,7 @@ class Compressor(object):
                        "IOError while processing '%s': %s" % (value, e))
                finally:
                    fd.close()
                content = self.filter(content, "input",
                content = self.filter(content, METHOD_INPUT,
                    filename=value, basename=basename, elem=elem, kind=kind)
                attribs = self.parser.elem_attribs(elem)
                charset = attribs.get("charset", self.charset)
@@ -119,22 +120,21 @@ class Compressor(object):
            return content
        attrs = self.parser.elem_attribs(elem)
        mimetype = attrs.get("type", None)
        if mimetype is not None:
        if mimetype:
            command = self.all_mimetypes.get(mimetype)
            if command is None:
                if mimetype not in ("text/css", "text/javascript"):
                    error = ("Couldn't find any precompiler in "
                             "COMPRESS_PRECOMPILERS setting for "
                             "mimetype '%s'." % mimetype)
                    raise CompressorError(error)
                    raise CompressorError("Couldn't find any precompiler in "
                                          "COMPRESS_PRECOMPILERS setting for "
                                          "mimetype '%s'." % mimetype)
            else:
                content = CompilerFilter(content, filter_type=self.type,
                    command=command).output(**kwargs)
                return CompilerFilter(content, filter_type=self.type,
                    command=command, filename=filename).output(**kwargs)
        return content

    def filter(self, content, method, **kwargs):
        # run compiler
        if method == "input":
        if method == METHOD_INPUT:
            content = self.precompile(content, **kwargs)

        for filter_cls in self.cached_filters:
@@ -149,14 +149,11 @@ class Compressor(object):

    @cached_property
    def combined(self):
        return self.filter(self.concat, method="output")

    def hash(self, content):
        return get_hexdigest(content)[:12]
        return self.filter(self.concat, method=METHOD_OUTPUT)

    def filepath(self, content):
        return os.path.join(settings.COMPRESS_OUTPUT_DIR.strip(os.sep),
            self.output_prefix, "%s.%s" % (self.hash(content), self.type))
            self.output_prefix, "%s.%s" % (get_hexdigest(content, 12), self.type))

    def output(self, mode='file', forced=False):
        """

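A quick illustration of the constants introduced above: split_contents() now tags each entry with SOURCE_FILE or SOURCE_HUNK instead of the old 'file'/'hunk' strings, and filter() dispatches on METHOD_INPUT/METHOD_OUTPUT rather than bare 'input'/'output'. A minimal sketch; the path and markup are illustrative, following the style of the updated tests further down:

    from compressor.base import SOURCE_FILE, SOURCE_HUNK

    # Each entry is (kind, value, basename, elem); kind is now an integer constant.
    split = [
        (SOURCE_FILE, '/srv/media/css/one.css', 'css/one.css', '<link rel="stylesheet" href="/media/css/one.css" />'),
        (SOURCE_HUNK, u'p { border:5px solid green;}', None, '<style>p { border:5px solid green;}</style>'),
    ]
    # Mirrors what the mtimes property does: keep only the on-disk files.
    file_paths = [value for kind, value, basename, elem in split if kind == SOURCE_FILE]
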
@@ -1,29 +1,37 @@
import os
import socket
import time

from django.core.cache import get_cache
from django.utils.encoding import smart_str
from django.utils.hashcompat import sha_constructor
from django.utils.hashcompat import md5_constructor

from compressor.conf import settings


def get_hexdigest(plaintext, length=None):
    digest = sha_constructor(smart_str(plaintext)).hexdigest()
    digest = md5_constructor(smart_str(plaintext)).hexdigest()
    if length:
        return digest[:length]
    return digest


def get_cachekey(key):
    return ("django_compressor.%s.%s" % (socket.gethostname(), key))


def get_mtime_cachekey(filename):
    return "django_compressor.mtime.%s.%s" % (socket.gethostname(),
                                              get_hexdigest(filename))
    return get_cachekey("mtime.%s" % get_hexdigest(filename))


def get_offline_cachekey(source):
    return ("django_compressor.offline.%s.%s" %
            (socket.gethostname(),
             get_hexdigest("".join(smart_str(s) for s in source))))
    return get_cachekey(
        "offline.%s" % get_hexdigest("".join(smart_str(s) for s in source)))


def get_templatetag_cachekey(compressor, mode, kind):
    return get_cachekey(
        "templatetag.%s.%s.%s" % (compressor.cachekey, mode, kind))


def get_mtime(filename):
@@ -38,9 +46,34 @@ def get_mtime(filename):


def get_hashed_mtime(filename, length=12):
    filename = os.path.realpath(filename)
    mtime = str(int(get_mtime(filename)))
    try:
        filename = os.path.realpath(filename)
        mtime = str(int(get_mtime(filename)))
    except OSError:
        return None
    return get_hexdigest(mtime, length)


def cache_get(key):
    packed_val = cache.get(key)
    if packed_val is None:
        return None
    val, refresh_time, refreshed = packed_val
    if (time.time() > refresh_time) and not refreshed:
        # Store the stale value while the cache
        # revalidates for another MINT_DELAY seconds.
        cache_set(key, val, refreshed=True,
            timeout=settings.COMPRESS_MINT_DELAY)
        return None
    return val


def cache_set(key, val, refreshed=False,
        timeout=settings.COMPRESS_REBUILD_TIMEOUT):
    refresh_time = timeout + time.time()
    real_timeout = timeout + settings.COMPRESS_MINT_DELAY
    packed_val = (val, refresh_time, refreshed)
    return cache.set(key, packed_val, real_timeout)


cache = get_cache(settings.COMPRESS_CACHE_BACKEND)

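cache_get() and cache_set() implement the "mint" caching pattern that previously lived on the template tag node: values are stored together with a refresh timestamp, and the first reader that sees an expired value briefly re-stores the stale copy (for COMPRESS_MINT_DELAY seconds) and returns None, so only one client rebuilds at a time. A rough usage sketch; the key suffix and the rebuild function are illustrative, not part of the API:

    from compressor.cache import cache_get, cache_set, get_cachekey

    def rebuild():
        # stand-in for the real (expensive) render/compress step
        return "<rendered output>"

    key = get_cachekey("example.output")  # hypothetical key suffix
    value = cache_get(key)
    if value is None:
        value = rebuild()
        cache_set(key, value)
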
@@ -1,7 +1,5 @@
import os

from compressor.conf import settings
from compressor.base import Compressor
from compressor.base import Compressor, SOURCE_HUNK, SOURCE_FILE
from compressor.exceptions import UncompressableFileError


@@ -26,12 +24,12 @@ class CssCompressor(Compressor):
            try:
                basename = self.get_basename(elem_attribs['href'])
                filename = self.get_filename(basename)
                data = ('file', filename, basename, elem)
                data = (SOURCE_FILE, filename, basename, elem)
            except UncompressableFileError:
                if settings.DEBUG:
                    raise
        elif elem_name == 'style':
            data = ('hunk', self.parser.elem_content(elem), None, elem)
            data = (SOURCE_HUNK, self.parser.elem_content(elem), None, elem)
        if data:
            self.split_content.append(data)
            media = elem_attribs.get('media', None)

@@ -1,4 +1,3 @@
import os
import logging
import subprocess
import tempfile
@@ -31,15 +30,18 @@ class CompilerFilter(FilterBase):
    external commands.
    """
    command = None
    filename = None
    options = {}

    def __init__(self, content, filter_type=None, verbose=0, command=None, **kwargs):
    def __init__(self, content, filter_type=None, verbose=0, command=None, filename=None, **kwargs):
        super(CompilerFilter, self).__init__(content, filter_type, verbose)
        if command:
            self.command = command
        self.options.update(kwargs)
        if self.command is None:
            raise FilterError("Required command attribute not set")
        if filename:
            self.filename = filename
        self.stdout = subprocess.PIPE
        self.stdin = subprocess.PIPE
        self.stderr = subprocess.PIPE
@@ -49,10 +51,13 @@ class CompilerFilter(FilterBase):
        outfile = None
        try:
            if "{infile}" in self.command:
                infile = tempfile.NamedTemporaryFile(mode='w')
                infile.write(self.content)
                infile.flush()
                self.options["infile"] = infile.name
                if not self.filename:
                    infile = tempfile.NamedTemporaryFile(mode='w')
                    infile.write(self.content)
                    infile.flush()
                    self.options["infile"] = infile.name
                else:
                    self.options["infile"] = self.filename
            if "{outfile}" in self.command:
                ext = ".%s" % self.type and self.type or ""
                outfile = tempfile.NamedTemporaryFile(mode='w', suffix=ext)
@@ -60,7 +65,7 @@ class CompilerFilter(FilterBase):
            cmd = stringformat.FormattableString(self.command).format(**self.options)
            proc = subprocess.Popen(cmd_split(cmd),
                stdout=self.stdout, stdin=self.stdin, stderr=self.stderr)
            if infile is not None:
            if infile is not None or self.filename is not None:
                filtered, err = proc.communicate()
            else:
                filtered, err = proc.communicate(self.content)

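With the new filename argument, CompilerFilter can hand the original file's path to an external command instead of always copying the content into a temporary file; {infile} is only backed by a NamedTemporaryFile when no filename is known. A sketch based on the tests added in this commit (assumes it is run from the repository root):

    import sys
    from compressor.filters.base import CompilerFilter

    path = "compressor/tests/media/css/one.css"
    command = "%s compressor/tests/precompiler.py -f {infile} -o {outfile}" % sys.executable
    compiler = CompilerFilter(content=open(path).read(), filename=path, command=command)
    print compiler.output()  # {infile} expands to the real path above, not a temp copy
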
@@ -11,8 +11,15 @@ URL_PATTERN = re.compile(r'url\(([^\)]+)\)')


class CssAbsoluteFilter(FilterBase):

    def __init__(self, *args, **kwargs):
        super(CssAbsoluteFilter, self).__init__(*args, **kwargs)
        self.root = settings.COMPRESS_ROOT
        self.url = settings.COMPRESS_URL.rstrip('/')
        self.url_path = self.url
        self.has_scheme = False

    def input(self, filename=None, basename=None, **kwargs):
        self.root = os.path.normcase(os.path.abspath(settings.COMPRESS_ROOT))
        if filename is not None:
            filename = os.path.normcase(os.path.abspath(filename))
        if (not (filename and filename.startswith(self.root)) and
@@ -20,23 +27,16 @@ class CssAbsoluteFilter(FilterBase):
            return self.content
        self.path = basename.replace(os.sep, '/')
        self.path = self.path.lstrip('/')
        self.url = settings.COMPRESS_URL.rstrip('/')
        self.url_path = self.url
        try:
            self.mtime = get_hashed_mtime(filename)
        except OSError:
            self.mtime = None
        self.has_http = False
        if self.url.startswith('http://') or self.url.startswith('https://'):
            self.has_http = True
        self.mtime = get_hashed_mtime(filename)
        if self.url.startswith(('http://', 'https://')):
            self.has_scheme = True
            parts = self.url.split('/')
            self.url = '/'.join(parts[2:])
            self.url_path = '/%s' % '/'.join(parts[3:])
            self.protocol = '%s/' % '/'.join(parts[:2])
            self.host = parts[2]
        self.directory_name = '/'.join([self.url, os.path.dirname(self.path)])
        output = URL_PATTERN.sub(self.url_converter, self.content)
        return output
        self.directory_name = '/'.join((self.url, os.path.dirname(self.path)))
        return URL_PATTERN.sub(self.url_converter, self.content)

    def find(self, basename):
        if settings.DEBUG and basename and staticfiles.finders:
@@ -44,7 +44,7 @@ class CssAbsoluteFilter(FilterBase):

    def guess_filename(self, url):
        local_path = url
        if self.has_http:
        if self.has_scheme:
            # COMPRESS_URL had a protocol, remove it and the hostname from our path.
            local_path = local_path.replace(self.protocol + self.host, "", 1)
        # Now, we just need to check if we can find the path from COMPRESS_URL in our url
@@ -59,24 +59,19 @@ class CssAbsoluteFilter(FilterBase):
        mtime = filename and get_hashed_mtime(filename) or self.mtime
        if mtime is None:
            return url
        if (url.startswith('http://') or
                url.startswith('https://') or
                url.startswith('/')):
        if url.startswith(('http://', 'https://', '/')):
            if "?" in url:
                return "%s&%s" % (url, mtime)
            return "%s?%s" % (url, mtime)
                url = "%s&%s" % (url, mtime)
            else:
                url = "%s?%s" % (url, mtime)
        return url

    def url_converter(self, matchobj):
        url = matchobj.group(1)
        url = url.strip(' \'"')
        if (url.startswith('http://') or
                url.startswith('https://') or
                url.startswith('/') or
                url.startswith('data:')):
        if url.startswith(('http://', 'https://', '/', 'data:')):
            return "url('%s')" % self.add_mtime(url)
        full_url = '/'.join([str(self.directory_name), url])
        full_url = posixpath.normpath(full_url)
        if self.has_http:
        full_url = posixpath.normpath('/'.join([self.directory_name, url]))
        if self.has_scheme:
            full_url = "%s%s" % (self.protocol, full_url)
        return "url('%s')" % self.add_mtime(full_url)

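The filter's behaviour is unchanged by this refactoring: relative url() references are still rewritten against COMPRESS_URL and get the hashed mtime appended as a query string; the has_http flag is merely renamed to has_scheme and the per-run state now starts in __init__(). Roughly, following the tests further down (the filename here is illustrative):

    from compressor.filters.css_default import CssAbsoluteFilter

    css = "p { background: url('images/image.gif') }"
    filter = CssAbsoluteFilter(css)
    # For a file below COMPRESS_ROOT this returns something like
    # "p { background: url('/media/css/url/images/image.gif?<hashed-mtime>') }"
    result = filter.input(filename='/srv/media/css/url/test.css', basename='css/url/test.css')
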
@@ -248,4 +248,4 @@ def main():


if __name__ == '__main__':
    main()
    main()

@@ -1,6 +1,7 @@
from compressor.utils import staticfiles
from compressor.storage import CompressorFileStorage


class CompressorFinder(staticfiles.finders.BaseStorageFinder):
    """
    A staticfiles finder that looks in COMPRESS_ROOT

@@ -1,7 +1,5 @@
import os

from compressor.conf import settings
from compressor.base import Compressor
from compressor.base import Compressor, SOURCE_HUNK, SOURCE_FILE
from compressor.exceptions import UncompressableFileError


@@ -24,11 +22,11 @@ class JsCompressor(Compressor):
                basename = self.get_basename(attribs['src'])
                filename = self.get_filename(basename)
                self.split_content.append(
                    ('file', filename, basename, elem))
                    (SOURCE_FILE, filename, basename, elem))
            except UncompressableFileError:
                if settings.DEBUG:
                    raise
        else:
            content = self.parser.elem_content(elem)
            self.split_content.append(('hunk', content, None, elem))
            self.split_content.append((SOURCE_HUNK, content, None, elem))
        return self.split_content

@@ -36,6 +36,7 @@ class Command(NoArgsCommand):
            "can lead to infinite recursion if a link points to a parent "
            "directory of itself.", dest='follow_links'),
    )

    def get_loaders(self):
        from django.template.loader import template_source_loaders
        if template_source_loaders is None:
@@ -113,11 +114,11 @@ class Command(NoArgsCommand):
                            settings.FILE_CHARSET))
                finally:
                    template_file.close()
            except IOError: # unreadable file -> ignore
            except IOError:  # unreadable file -> ignore
                if verbosity > 0:
                    log.write("Unreadable template at: %s\n" % template_name)
                continue
            except TemplateSyntaxError: # broken template -> ignore
            except TemplateSyntaxError:  # broken template -> ignore
                if verbosity > 0:
                    log.write("Invalid template at: %s\n" % template_name)
                continue
@@ -159,7 +160,7 @@ class Command(NoArgsCommand):
    def walk_nodes(self, node):
        for node in getattr(node, "nodelist", []):
            if (isinstance(node, CompressorNode) or
                    node.__class__.__name__ == "CompressorNode"): # for 1.1.X
                    node.__class__.__name__ == "CompressorNode"):  # for 1.1.X
                yield node
            else:
                for node in self.walk_nodes(node):
@@ -180,7 +181,7 @@ class Command(NoArgsCommand):
        """
        ext_list = []
        for ext in extensions:
            ext_list.extend(ext.replace(' ','').split(','))
            ext_list.extend(ext.replace(' ', '').split(','))
        for i, ext in enumerate(ext_list):
            if not ext.startswith('.'):
                ext_list[i] = '.%s' % ext_list[i]

@@ -8,6 +8,7 @@ from compressor.cache import cache, get_mtime, get_mtime_cachekey
from compressor.conf import settings
from compressor.utils import walk


class Command(NoArgsCommand):
    help = "Add or remove all mtime values from the cache"
    option_list = NoArgsCommand.option_list + (

@@ -4,7 +4,7 @@ from django.utils.encoding import smart_unicode

from compressor.exceptions import ParserError
from compressor.parser import ParserBase
from compressor.utils.cache import cached_property
from compressor.utils.decorators import cached_property


class BeautifulSoupParser(ParserBase):

@@ -4,7 +4,7 @@ from django.core.exceptions import ImproperlyConfigured

from compressor.exceptions import ParserError
from compressor.parser import ParserBase
from compressor.utils.cache import cached_property
from compressor.utils.decorators import cached_property


class Html5LibParser(ParserBase):

@@ -1,6 +1,5 @@
from HTMLParser import HTMLParser
from django.utils.encoding import smart_unicode
from django.utils.datastructures import SortedDict
from compressor.exceptions import ParserError
from compressor.parser import ParserBase

@@ -4,7 +4,7 @@ from django.utils.encoding import smart_unicode

from compressor.exceptions import ParserError
from compressor.parser import ParserBase
from compressor.utils.cache import cached_property
from compressor.utils.decorators import cached_property


class LxmlParser(ParserBase):

@@ -1,3 +1,4 @@
import os
from django import VERSION as DJANGO_VERSION
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
@@ -66,7 +67,7 @@ class CompressorSettings(AppSettings):
        if not value:
            raise ImproperlyConfigured(
                "The COMPRESS_ROOT setting must be set.")
        return value
        return os.path.normcase(os.path.abspath(value))

    def configure_url(self, value):
        # Uses Django 1.3's STATIC_URL by default or falls back to MEDIA_URL

@@ -1,12 +1,13 @@
import time

from django import template
from django.core.exceptions import ImproperlyConfigured

from compressor.cache import cache, get_offline_cachekey
from compressor.cache import (cache, cache_get, cache_set,
    get_offline_cachekey, get_templatetag_cachekey)
from compressor.conf import settings
from compressor.utils import get_class

register = template.Library()

OUTPUT_FILE = 'file'
OUTPUT_INLINE = 'inline'
OUTPUT_MODES = (OUTPUT_FILE, OUTPUT_INLINE)
@@ -15,9 +16,8 @@ COMPRESSORS = {
    "js": settings.COMPRESS_JS_COMPRESSOR,
}

register = template.Library()

class CompressorNode(template.Node):

    def __init__(self, nodelist, kind=None, mode=OUTPUT_FILE):
        self.nodelist = nodelist
        self.kind = kind
@@ -25,29 +25,6 @@ class CompressorNode(template.Node):
        self.compressor_cls = get_class(
            COMPRESSORS.get(self.kind), exception=ImproperlyConfigured)

    def cache_get(self, key):
        packed_val = cache.get(key)
        if packed_val is None:
            return None
        val, refresh_time, refreshed = packed_val
        if (time.time() > refresh_time) and not refreshed:
            # Store the stale value while the cache
            # revalidates for another MINT_DELAY seconds.
            self.cache_set(key, val, refreshed=True,
                timeout=settings.COMPRESS_MINT_DELAY)
            return None
        return val

    def cache_set(self, key, val, refreshed=False,
            timeout=settings.COMPRESS_REBUILD_TIMEOUT):
        refresh_time = timeout + time.time()
        real_timeout = timeout + settings.COMPRESS_MINT_DELAY
        packed_val = (val, refresh_time, refreshed)
        return cache.set(key, packed_val, real_timeout)

    def cache_key(self, compressor):
        return "%s.%s.%s" % (compressor.cachekey, self.mode, self.kind)

    def debug_mode(self, context):
        if settings.COMPRESS_DEBUG_TOGGLE:
            # Only check for the debug parameter
@@ -71,8 +48,9 @@ class CompressorNode(template.Node):
        and return a tuple of cache key and output
        """
        if settings.COMPRESS_ENABLED and not forced:
            cache_key = self.cache_key(compressor)
            cache_content = self.cache_get(cache_key)
            cache_key = get_templatetag_cachekey(
                compressor, self.mode, self.kind)
            cache_content = cache_get(cache_key)
            return cache_key, cache_content
        return None, None

@@ -96,7 +74,7 @@ class CompressorNode(template.Node):
        try:
            rendered_output = compressor.output(self.mode, forced=forced)
            if cache_key:
                self.cache_set(cache_key, rendered_output)
                cache_set(cache_key, rendered_output)
            return rendered_output
        except Exception, e:
            if settings.DEBUG or forced:
@@ -105,6 +83,7 @@ class CompressorNode(template.Node):
        # 5. Or don't do anything in production
        return self.nodelist.render(context)


@register.tag
def compress(parser, token):
    """

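The mint-cache helpers and the cache key format move out of CompressorNode and into compressor.cache, so the template tag now asks get_templatetag_cachekey() for its key and uses the module-level cache_get()/cache_set(). A condensed sketch of that flow; the function name is made up and the real node's enabled/forced checks are simplified away:

    from compressor.cache import cache_get, cache_set, get_templatetag_cachekey

    def render_compressed(compressor, mode, kind, forced=False):
        cache_key = get_templatetag_cachekey(compressor, mode, kind)
        cached = cache_get(cache_key)
        if cached is not None:
            return cached
        output = compressor.output(mode, forced=forced)
        cache_set(cache_key, output)
        return output
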
compressor/tests/precompiler.py (new file, 34 lines)
@@ -0,0 +1,34 @@
#!/usr/bin/env python
import optparse
import sys

def main():
    p = optparse.OptionParser()
    p.add_option('-f', '--file', action="store",
        type="string", dest="filename",
        help="File to read from, defaults to stdin", default=None)
    p.add_option('-o', '--output', action="store",
        type="string", dest="outfile",
        help="File to write to, defaults to stdout", default=None)

    options, arguments = p.parse_args()

    if options.filename:
        f = open(options.filename)
        content = f.read()
        f.close()
    else:
        content = sys.stdin.read()

    content = content.replace('background:', 'color:')

    if options.outfile:
        f = open(options.outfile, 'w')
        f.write(content)
        f.close()
    else:
        print content


if __name__ == '__main__':
    main()

@@ -1,6 +1,8 @@
#!/usr/bin/env python
import os
import sys
import coverage
from os.path import join

from django.conf import settings

@@ -28,9 +30,23 @@ from django.test.simple import run_tests
def runtests(*test_args):
    if not test_args:
        test_args = ['tests']
    parent = os.path.join(TEST_DIR, "..", "..")
    sys.path.insert(0, parent)
    parent_dir = os.path.join(TEST_DIR, "..", "..")
    sys.path.insert(0, parent_dir)
    cov = coverage.coverage(branch=True,
        include=[
            os.path.join(parent_dir, 'compressor', '*.py')
        ],
        omit=[
            join(parent_dir, 'compressor', 'tests', '*.py'),
            join(parent_dir, 'compressor', 'utils', 'stringformat.py'),
            join(parent_dir, 'compressor', 'filters', 'jsmin', 'rjsmin.py'),
            join(parent_dir, 'compressor', 'filters', 'cssmin', 'cssmin.py'),
        ])
    cov.load()
    cov.start()
    failures = run_tests(test_args, verbosity=1, interactive=True)
    cov.stop()
    cov.save()
    sys.exit(failures)

@@ -1,6 +1,7 @@
from __future__ import with_statement
import os
import re
import socket
import sys
from unittest2 import skipIf

from BeautifulSoup import BeautifulSoup
@@ -26,12 +27,15 @@ from django.template import Template, Context, TemplateSyntaxError
from django.test import TestCase

from compressor import base
from compressor.cache import get_hashed_mtime
from compressor.base import SOURCE_HUNK, SOURCE_FILE
from compressor.cache import get_hashed_mtime, get_hexdigest
from compressor.conf import settings
from compressor.css import CssCompressor
from compressor.js import JsCompressor
from compressor.management.commands.compress import Command as CompressCommand
from compressor.utils import find_command
from compressor.filters.base import CompilerFilter


class CompressorTestCase(TestCase):

@@ -55,9 +59,9 @@ class CompressorTestCase(TestCase):

    def test_css_split(self):
        out = [
            ('file', os.path.join(settings.COMPRESS_ROOT, u'css/one.css'), u'css/one.css', u'<link rel="stylesheet" href="/media/css/one.css" type="text/css" charset="utf-8" />'),
            ('hunk', u'p { border:5px solid green;}', None, u'<style type="text/css">p { border:5px solid green;}</style>'),
            ('file', os.path.join(settings.COMPRESS_ROOT, u'css/two.css'), u'css/two.css', u'<link rel="stylesheet" href="/media/css/two.css" type="text/css" charset="utf-8" />'),
            (SOURCE_FILE, os.path.join(settings.COMPRESS_ROOT, u'css/one.css'), u'css/one.css', u'<link rel="stylesheet" href="/media/css/one.css" type="text/css" charset="utf-8" />'),
            (SOURCE_HUNK, u'p { border:5px solid green;}', None, u'<style type="text/css">p { border:5px solid green;}</style>'),
            (SOURCE_FILE, os.path.join(settings.COMPRESS_ROOT, u'css/two.css'), u'css/two.css', u'<link rel="stylesheet" href="/media/css/two.css" type="text/css" charset="utf-8" />'),
        ]
        split = self.css_node.split_contents()
        split = [(x[0], x[1], x[2], self.css_node.parser.elem_str(x[3])) for x in split]
@@ -74,27 +78,28 @@ class CompressorTestCase(TestCase):
    def test_css_mtimes(self):
        is_date = re.compile(r'^\d{10}[\.\d]+$')
        for date in self.css_node.mtimes:
            self.assert_(is_date.match(str(float(date))), "mtimes is returning something that doesn't look like a date: %s" % date)
            self.assertTrue(is_date.match(str(float(date))),
                "mtimes is returning something that doesn't look like a date: %s" % date)

    def test_css_return_if_off(self):
        settings.COMPRESS_ENABLED = False
        self.assertEqual(self.css, self.css_node.output())

    def test_cachekey(self):
        host_name = socket.gethostname()
        is_cachekey = re.compile(r'django_compressor\.%s\.\w{12}' % host_name)
        self.assert_(is_cachekey.match(self.css_node.cachekey), "cachekey is returning something that doesn't look like r'django_compressor\.%s\.\w{12}'" % host_name)
        is_cachekey = re.compile(r'\w{12}')
        self.assertTrue(is_cachekey.match(self.css_node.cachekey),
            "cachekey is returning something that doesn't look like r'\w{12}'")

    def test_css_hash(self):
        self.assertEqual('666f3aa8eacd', self.css_node.hash(self.css))
        self.assertEqual('c618e6846d04', get_hexdigest(self.css, 12))

    def test_css_return_if_on(self):
        output = u'<link rel="stylesheet" href="/media/CACHE/css/f7c661b7a124.css" type="text/css">'
        output = u'<link rel="stylesheet" href="/media/CACHE/css/e41ba2cc6982.css" type="text/css">'
        self.assertEqual(output, self.css_node.output().strip())

    def test_js_split(self):
        out = [('file', os.path.join(settings.COMPRESS_ROOT, u'js/one.js'), u'js/one.js', '<script src="/media/js/one.js" type="text/javascript" charset="utf-8"></script>'),
            ('hunk', u'obj.value = "value";', None, '<script type="text/javascript" charset="utf-8">obj.value = "value";</script>')
        out = [(SOURCE_FILE, os.path.join(settings.COMPRESS_ROOT, u'js/one.js'), u'js/one.js', '<script src="/media/js/one.js" type="text/javascript" charset="utf-8"></script>'),
            (SOURCE_HUNK, u'obj.value = "value";', None, '<script type="text/javascript" charset="utf-8">obj.value = "value";</script>')
        ]
        split = self.js_node.split_contents()
        split = [(x[0], x[1], x[2], self.js_node.parser.elem_str(x[3])) for x in split]
@@ -124,20 +129,20 @@ class CompressorTestCase(TestCase):
        settings.COMPRESS_PRECOMPILERS = precompilers

    def test_js_return_if_on(self):
        output = u'<script type="text/javascript" src="/media/CACHE/js/3f33b9146e12.js" charset="utf-8"></script>'
        output = u'<script type="text/javascript" src="/media/CACHE/js/066cd253eada.js" charset="utf-8"></script>'
        self.assertEqual(output, self.js_node.output())

    def test_custom_output_dir(self):
        try:
            old_output_dir = settings.COMPRESS_OUTPUT_DIR
            settings.COMPRESS_OUTPUT_DIR = 'custom'
            output = u'<script type="text/javascript" src="/media/custom/js/3f33b9146e12.js" charset="utf-8"></script>'
            output = u'<script type="text/javascript" src="/media/custom/js/066cd253eada.js" charset="utf-8"></script>'
            self.assertEqual(output, JsCompressor(self.js).output())
            settings.COMPRESS_OUTPUT_DIR = ''
            output = u'<script type="text/javascript" src="/media/js/3f33b9146e12.js" charset="utf-8"></script>'
            output = u'<script type="text/javascript" src="/media/js/066cd253eada.js" charset="utf-8"></script>'
            self.assertEqual(output, JsCompressor(self.js).output())
            settings.COMPRESS_OUTPUT_DIR = '/custom/nested/'
            output = u'<script type="text/javascript" src="/media/custom/nested/js/3f33b9146e12.js" charset="utf-8"></script>'
            output = u'<script type="text/javascript" src="/media/custom/nested/js/066cd253eada.js" charset="utf-8"></script>'
            self.assertEqual(output, JsCompressor(self.js).output())
        finally:
            settings.COMPRESS_OUTPUT_DIR = old_output_dir
@@ -164,17 +169,17 @@ class Html5LibParserTests(ParserTestCase, CompressorTestCase):

    def test_css_split(self):
        out = [
            ('file', os.path.join(settings.COMPRESS_ROOT, u'css/one.css'), u'css/one.css', u'<link charset="utf-8" href="/media/css/one.css" rel="stylesheet" type="text/css">'),
            ('hunk', u'p { border:5px solid green;}', None, u'<style type="text/css">p { border:5px solid green;}</style>'),
            ('file', os.path.join(settings.COMPRESS_ROOT, u'css/two.css'), u'css/two.css', u'<link charset="utf-8" href="/media/css/two.css" rel="stylesheet" type="text/css">'),
            (SOURCE_FILE, os.path.join(settings.COMPRESS_ROOT, u'css/one.css'), u'css/one.css', u'<link charset="utf-8" href="/media/css/one.css" rel="stylesheet" type="text/css">'),
            (SOURCE_HUNK, u'p { border:5px solid green;}', None, u'<style type="text/css">p { border:5px solid green;}</style>'),
            (SOURCE_FILE, os.path.join(settings.COMPRESS_ROOT, u'css/two.css'), u'css/two.css', u'<link charset="utf-8" href="/media/css/two.css" rel="stylesheet" type="text/css">'),
        ]
        split = self.css_node.split_contents()
        split = [(x[0], x[1], x[2], self.css_node.parser.elem_str(x[3])) for x in split]
        self.assertEqual(out, split)

    def test_js_split(self):
        out = [('file', os.path.join(settings.COMPRESS_ROOT, u'js/one.js'), u'js/one.js', u'<script charset="utf-8" src="/media/js/one.js" type="text/javascript"></script>'),
            ('hunk', u'obj.value = "value";', None, u'<script charset="utf-8" type="text/javascript">obj.value = "value";</script>')
        out = [(SOURCE_FILE, os.path.join(settings.COMPRESS_ROOT, u'js/one.js'), u'js/one.js', u'<script charset="utf-8" src="/media/js/one.js" type="text/javascript"></script>'),
            (SOURCE_HUNK, u'obj.value = "value";', None, u'<script charset="utf-8" type="text/javascript">obj.value = "value";</script>')
        ]
        split = self.js_node.split_contents()
        split = [(x[0], x[1], x[2], self.js_node.parser.elem_str(x[3])) for x in split]
@@ -213,6 +218,7 @@ class CssAbsolutizingTestCase(TestCase):
        filter = CssAbsoluteFilter(content)
        self.assertEqual(output, filter.input(filename=filename, basename='css/url/test.css'))
        settings.COMPRESS_URL = 'http://media.example.com/'
        filter = CssAbsoluteFilter(content)
        filename = os.path.join(settings.COMPRESS_ROOT, 'css/url/test.css')
        output = "p { background: url('%simages/image.gif?%s') }" % (settings.COMPRESS_URL, get_hashed_mtime(filename))
        self.assertEqual(output, filter.input(filename=filename, basename='css/url/test.css'))
@@ -225,6 +231,7 @@ class CssAbsolutizingTestCase(TestCase):
        filter = CssAbsoluteFilter(content)
        self.assertEqual(output, filter.input(filename=filename, basename='css/url/test.css'))
        settings.COMPRESS_URL = 'https://media.example.com/'
        filter = CssAbsoluteFilter(content)
        filename = os.path.join(settings.COMPRESS_ROOT, 'css/url/test.css')
        output = "p { background: url('%simages/image.gif?%s') }" % (settings.COMPRESS_URL, get_hashed_mtime(filename))
        self.assertEqual(output, filter.input(filename=filename, basename='css/url/test.css'))
@@ -237,6 +244,7 @@ class CssAbsolutizingTestCase(TestCase):
        filter = CssAbsoluteFilter(content)
        self.assertEqual(output, filter.input(filename=filename, basename='css/url/test.css'))
        settings.COMPRESS_URL = 'https://media.example.com/'
        filter = CssAbsoluteFilter(content)
        output = "p { background: url('%simages/image.gif?%s') }" % (settings.COMPRESS_URL, get_hashed_mtime(filename))
        self.assertEqual(output, filter.input(filename=filename, basename='css/url/test.css'))

@@ -335,7 +343,7 @@ class TemplatetagTestCase(TestCase):
        {% endcompress %}
        """
        context = { 'MEDIA_URL': settings.COMPRESS_URL }
        out = u'<link rel="stylesheet" href="/media/CACHE/css/f7c661b7a124.css" type="text/css">'
        out = u'<link rel="stylesheet" href="/media/CACHE/css/e41ba2cc6982.css" type="text/css">'
        self.assertEqual(out, render(template, context))

    def test_nonascii_css_tag(self):
@@ -345,7 +353,7 @@ class TemplatetagTestCase(TestCase):
        {% endcompress %}
        """
        context = { 'MEDIA_URL': settings.COMPRESS_URL }
        out = '<link rel="stylesheet" href="/media/CACHE/css/1c1c0855907b.css" type="text/css">'
        out = '<link rel="stylesheet" href="/media/CACHE/css/799f6defe43c.css" type="text/css">'
        self.assertEqual(out, render(template, context))

    def test_js_tag(self):
@@ -355,7 +363,7 @@ class TemplatetagTestCase(TestCase):
        {% endcompress %}
        """
        context = { 'MEDIA_URL': settings.COMPRESS_URL }
        out = u'<script type="text/javascript" src="/media/CACHE/js/3f33b9146e12.js" charset="utf-8"></script>'
        out = u'<script type="text/javascript" src="/media/CACHE/js/066cd253eada.js" charset="utf-8"></script>'
        self.assertEqual(out, render(template, context))

    def test_nonascii_js_tag(self):
@@ -365,7 +373,7 @@ class TemplatetagTestCase(TestCase):
        {% endcompress %}
        """
        context = { 'MEDIA_URL': settings.COMPRESS_URL }
        out = u'<script type="text/javascript" src="/media/CACHE/js/5d5c0e1cb25f.js" charset="utf-8"></script>'
        out = u'<script type="text/javascript" src="/media/CACHE/js/e214fe629b28.js" charset="utf-8"></script>'
        self.assertEqual(out, render(template, context))

    def test_nonascii_latin1_js_tag(self):
@@ -375,7 +383,7 @@ class TemplatetagTestCase(TestCase):
        {% endcompress %}
        """
        context = { 'MEDIA_URL': settings.COMPRESS_URL }
        out = u'<script type="text/javascript" src="/media/CACHE/js/40a8e9ffb476.js" charset="utf-8"></script>'
        out = u'<script type="text/javascript" src="/media/CACHE/js/f1be5a5de243.js" charset="utf-8"></script>'
        self.assertEqual(out, render(template, context))

    def test_compress_tag_with_illegal_arguments(self):
@@ -414,7 +422,7 @@ class StorageTestCase(TestCase):
        {% endcompress %}
        """
        context = { 'MEDIA_URL': settings.COMPRESS_URL }
        out = u'<link rel="stylesheet" href="/media/CACHE/css/5b231a62e9a6.css.gz" type="text/css">'
        out = u'<link rel="stylesheet" href="/media/CACHE/css/1d4424458f88.css.gz" type="text/css">'
        self.assertEqual(out, render(template, context))


@@ -447,8 +455,8 @@ class OfflineGenerationTestCase(TestCase):
        count, result = CompressCommand().compress()
        self.assertEqual(2, count)
        self.assertEqual([
            u'<link rel="stylesheet" href="/media/CACHE/css/a55e1cf95000.css" type="text/css">\n',
            u'<script type="text/javascript" src="/media/CACHE/js/bf53fa5b13e2.js" charset="utf-8"></script>',
            u'<link rel="stylesheet" href="/media/CACHE/css/cd579b7deb7d.css" type="text/css">\n',
            u'<script type="text/javascript" src="/media/CACHE/js/0a2bb9a287c0.js" charset="utf-8"></script>',
        ], result)

    def test_offline_with_context(self):
@@ -459,8 +467,8 @@ class OfflineGenerationTestCase(TestCase):
        count, result = CompressCommand().compress()
        self.assertEqual(2, count)
        self.assertEqual([
            u'<link rel="stylesheet" href="/media/CACHE/css/8a2405e029de.css" type="text/css">\n',
            u'<script type="text/javascript" src="/media/CACHE/js/bf53fa5b13e2.js" charset="utf-8"></script>',
            u'<link rel="stylesheet" href="/media/CACHE/css/ee62fbfd116a.css" type="text/css">\n',
            u'<script type="text/javascript" src="/media/CACHE/js/0a2bb9a287c0.js" charset="utf-8"></script>',
        ], result)
        settings.COMPRESS_OFFLINE_CONTEXT = self._old_offline_context

@@ -481,3 +489,32 @@ CssTidyTestCase = skipIf(
    find_command(settings.COMPRESS_CSSTIDY_BINARY) is None,
    'CSStidy binary %r not found' % settings.COMPRESS_CSSTIDY_BINARY
)(CssTidyTestCase)

class PrecompilerTestCase(TestCase):

    def setUp(self):
        self.this_dir = os.path.dirname(__file__)
        self.filename = os.path.join(self.this_dir, 'media/css/one.css')
        self.test_precompiler = os.path.join(self.this_dir, 'precompiler.py')
        with open(self.filename, 'r') as f:
            self.content = f.read()

    def test_precompiler_infile_outfile(self):
        command = '%s %s -f {infile} -o {outfile}' % (sys.executable, self.test_precompiler)
        compiler = CompilerFilter(content=self.content, filename=self.filename, command=command)
        self.assertEqual(u"body { color:#990; }", compiler.output())

    def test_precompiler_stdin_outfile(self):
        command = '%s %s -o {outfile}' % (sys.executable, self.test_precompiler)
        compiler = CompilerFilter(content=self.content, filename=None, command=command)
        self.assertEqual(u"body { color:#990; }", compiler.output())

    def test_precompiler_stdin_stdout(self):
        command = '%s %s' % (sys.executable, self.test_precompiler)
        compiler = CompilerFilter(content=self.content, filename=None, command=command)
        self.assertEqual(u"body { color:#990; }\n", compiler.output())

    def test_precompiler_infile_stdout(self):
        command = '%s %s -f {infile}' % (sys.executable, self.test_precompiler)
        compiler = CompilerFilter(content=self.content, filename=None, command=command)
        self.assertEqual(u"body { color:#990; }\n", compiler.output())

@@ -1,20 +1,38 @@
# -*- coding: utf-8 -*-
import os
import sys
from shlex import split as cmd_split

from compressor.exceptions import FilterError

try:
    any = any

except NameError:

if sys.version_info < (2, 5):
    # Add any http://docs.python.org/library/functions.html?#any to Python < 2.5
    def any(seq):
        for item in seq:
            if item:
                return True
        return False

else:
    any = any


if sys.version_info < (2, 6):
    def walk(root, topdown=True, onerror=None, followlinks=False):
        """
        A version of os.walk that can follow symlinks for Python < 2.6
        """
        for dirpath, dirnames, filenames in os.walk(root, topdown, onerror):
            yield (dirpath, dirnames, filenames)
            if followlinks:
                for d in dirnames:
                    p = os.path.join(dirpath, d)
                    if os.path.islink(p):
                        for link_dirpath, link_dirnames, link_filenames in walk(p):
                            yield (link_dirpath, link_dirnames, link_filenames)
else:
    from os import walk


def get_class(class_string, exception=FilterError):
    """
@@ -45,20 +63,6 @@ def get_mod_func(callback):
    return callback[:dot], callback[dot + 1:]


def walk(root, topdown=True, onerror=None, followlinks=False):
    """
    A version of os.walk that can follow symlinks for Python < 2.6
    """
    for dirpath, dirnames, filenames in os.walk(root, topdown, onerror):
        yield (dirpath, dirnames, filenames)
        if followlinks:
            for d in dirnames:
                p = os.path.join(dirpath, d)
                if os.path.islink(p):
                    for link_dirpath, link_dirnames, link_filenames in walk(p):
                        yield (link_dirpath, link_dirnames, link_filenames)


def get_pathext(default_pathext=None):
    """
    Returns the path extensions from environment or a default

@@ -275,4 +275,4 @@ def selftest():
    print 'Test successful'

if __name__ == '__main__':
    selftest()
    selftest()

@@ -1,6 +1,18 @@
Changelog
=========

0.9
---

- Fixed the precompiler support to also use the full file path instead of a
  temporarily created file.

- Enabled test coverage.

- Refactored caching and other utility code.

- Switched from SHA1 to MD5 for hash generation to lower the computational impact.

0.8
---

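In concrete terms, the hashing change listed above means every digest now comes from compressor.cache.get_hexdigest(), which wraps md5_constructor instead of sha_constructor, and output filenames use its first 12 characters (see Compressor.filepath() earlier, where the old Compressor.hash() helper was dropped). A tiny sketch with an illustrative input string:

    from compressor.cache import get_hexdigest

    content = u"p { border:5px solid green;}"  # any string; this value is illustrative
    digest = get_hexdigest(content, 12)        # 12-character MD5 prefix, used in e.g. CACHE/css/<digest>.css
    assert len(digest) == 12
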
@@ -48,9 +48,9 @@ copyright = u'2011, Django Compressor authors'
# built documents.
#
# The short X.Y version.
version = '0.8'
version = '0.9'
# The full version, including alpha/beta/rc tags.
release = '0.8'
release = '0.9'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.

tox.ini (24 changed lines)
@@ -1,8 +1,17 @@
[testenv]
distribute=false
sitepackages=true
distribute = false
sitepackages = true
commands =
    python compressor/tests/runtests.py
    {envpython} compressor/tests/runtests.py []
    coverage html -d {envtmpdir}/coverage

[testenv:docs]
changedir = docs
deps =
    Sphinx
commands =
    make clean
    make html

[testenv:py25-1.1.X]
basepython = python2.5
@@ -10,6 +19,7 @@ deps =
    unittest2
    BeautifulSoup
    html5lib
    coverage
    django==1.1.4

[testenv:py26-1.1.X]
@@ -18,6 +28,7 @@ deps =
    unittest2
    BeautifulSoup
    html5lib
    coverage
    django==1.1.4

[testenv:py27-1.1.X]
@@ -26,6 +37,7 @@ deps =
    unittest2
    BeautifulSoup
    html5lib
    coverage
    django==1.1.4

@@ -35,6 +47,7 @@ deps =
    unittest2
    BeautifulSoup
    html5lib
    coverage
    django==1.2.5

[testenv:py26-1.2.X]
@@ -43,6 +56,7 @@ deps =
    unittest2
    BeautifulSoup
    html5lib
    coverage
    django==1.2.5

[testenv:py27-1.2.X]
@@ -51,6 +65,7 @@ deps =
    unittest2
    BeautifulSoup
    html5lib
    coverage
    django==1.2.5

@@ -60,6 +75,7 @@ deps =
    unittest2
    BeautifulSoup
    html5lib
    coverage
    django==1.3

[testenv:py26-1.3.X]
@@ -68,6 +84,7 @@ deps =
    unittest2
    BeautifulSoup
    html5lib
    coverage
    django==1.3

[testenv:py27-1.3.X]
@@ -76,4 +93,5 @@ deps =
    unittest2
    BeautifulSoup
    html5lib
    coverage
    django==1.3