diff --git a/compressor/conf/settings.py b/compressor/conf/settings.py
index c546bb9..3925e2c 100644
--- a/compressor/conf/settings.py
+++ b/compressor/conf/settings.py
@@ -19,7 +19,7 @@ if COMPRESS_CSS_FILTERS is None:
 if COMPRESS_JS_FILTERS is None:
     COMPRESS_JS_FILTERS = []
 
-COMPRESS_DATA_URI_MIN_SIZE = getattr(settings, 'COMPRESS_DATA_URI_MIN_SIZE', 1024)
+DATA_URI_MIN_SIZE = getattr(settings, 'COMPRESS_DATA_URI_MIN_SIZE', 1024)
 
 # rebuilds the cache every 30 days if nothing has changed.
 REBUILD_TIMEOUT = getattr(settings, 'COMPRESS_REBUILD_TIMEOUT', 2592000) # 30 days
diff --git a/compressor/filters/datauri.py b/compressor/filters/datauri.py
index 6d4471b..cc7bcb6 100644
--- a/compressor/filters/datauri.py
+++ b/compressor/filters/datauri.py
@@ -31,7 +31,7 @@ class DataUriFilter(FilterBase):
         url = matchobj.group(1).strip(' \'"')
         if not url.startswith('data:'):
             path = self.get_file_path(url)
-            if os.stat(path).st_size <= settings.COMPRESS_DATA_URI_MIN_SIZE:
+            if os.stat(path).st_size <= settings.DATA_URI_MIN_SIZE:
                 data = b64encode(open(path, 'rb').read())
                 return 'url("data:%s;base64,%s")' % (mimetypes.guess_type(path)[0], data)
         return 'url("%s")' % url
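
From a project's point of view nothing changes: the Django setting is still named `COMPRESS_DATA_URI_MIN_SIZE`; only the attribute exposed on `compressor.conf.settings` drops the prefix. A minimal sketch of the before/after usage, assuming a configured Django project (the 2048 value and the `inline_threshold_reached` helper are illustrative, not part of this change):

```python
# Project settings.py -- the public setting name is unchanged:
COMPRESS_DATA_URI_MIN_SIZE = 2048  # illustrative: inline files up to 2 KB

# Inside django-compressor, the internal attribute is now read
# without the COMPRESS_ prefix:
import os
from compressor.conf import settings


def inline_threshold_reached(path):
    # Hypothetical helper: True if the file is small enough to embed
    # as a base64 data URI, per the renamed DATA_URI_MIN_SIZE attribute.
    return os.stat(path).st_size <= settings.DATA_URI_MIN_SIZE
```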