Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Sourcemaps #346

Open
wants to merge 14 commits into
base: master
Choose a base branch
from
8 changes: 8 additions & 0 deletions docs/configuration.rst
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,14 @@ Other settings

Defaults to ``not settings.DEBUG``.

``PIPELINE_COMPILER_CONCURRENCY``
.................................

If set, overrides the number of threads used to compile assets. Otherwise the
compiler will attempt to use as many threads as there are available cores.

Defaults to ``None``.

``PIPELINE_CSS_COMPRESSOR``
............................

Expand Down
8 changes: 7 additions & 1 deletion pipeline/compilers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,13 @@ def _compile(input_path):
except ImportError:
return list(map(_compile, paths))
else:
with futures.ThreadPoolExecutor(max_workers=multiprocessing.cpu_count()) as executor:
max_workers = (
settings.PIPELINE_COMPILER_CONCURRENCY or
multiprocessing.cpu_count())

with futures.ThreadPoolExecutor(
max_workers=max_workers
) as executor:
return list(executor.map(_compile, paths))

def output_path(self, path, extension):
Expand Down
33 changes: 21 additions & 12 deletions pipeline/compressors/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@

URL_DETECTOR = r'url\([\'"]?([^\s)]+\.[a-z]+[^\'"\s]*)[\'"]?\)'
URL_REPLACER = r'url\(__EMBED__(.+?)(\?\d+)?\)'
NON_REWRITABLE_URL = re.compile(r'^(http:|https:|data:|//)')
NON_REWRITABLE_URL = re.compile(r'^(http:|https:|data:|about:|//)')

DEFAULT_TEMPLATE_FUNC = "template"
TEMPLATE_FUNC = r"""var template = function(str){var fn = new Function('obj', 'var __p=[],print=function(){__p.push.apply(__p,arguments);};with(obj||{}){__p.push(\''+str.replace(/\\/g, '\\\\').replace(/'/g, "\\'").replace(/<%=([\s\S]+?)%>/g,function(match,code){return "',"+code.replace(/\\'/g, "'")+",'";}).replace(/<%([\s\S]+?)%>/g,function(match,code){return "');"+code.replace(/\\'/g, "'").replace(/[\r\n\t]/g,' ')+"__p.push('";}).replace(/\r/g,'\\r').replace(/\n/g,'\\n').replace(/\t/g,'\\t')+"');}return __p.join('');");return fn;};"""
Expand Down Expand Up @@ -53,18 +53,27 @@ def css_compressor(self):

def compress_js(self, paths, templates=None, **kwargs):
    """Concatenate and compress JS files.

    Returns a ``(js, source_map)`` tuple.  ``source_map`` is ``None``
    unless the configured compressor supports
    ``compress_js_with_source_map``.
    """
    def get_js():
        # Build the raw concatenated source: the files themselves,
        # any compiled JS templates, and the optional anonymous
        # function wrapper.
        js = self.concatenate(paths)
        if templates:
            js = js + self.compile_templates(templates)
        if not settings.PIPELINE_DISABLE_WRAPPER:
            js = "(function() { %s }).call(this);" % js
        return js

    compressor_cls = self.js_compressor
    if compressor_cls:
        compressor = compressor_cls(verbose=self.verbose)
        if hasattr(compressor, 'compress_js_with_source_map'):
            # Source-map-aware compressors work from the original file
            # paths so the generated map can reference them.
            return compressor.compress_js_with_source_map(paths)
        return compressor.compress_js(get_js()), None
    # BUG FIX: with no compressor configured, still return the
    # concatenated source.  The previous code returned (None, None)
    # here, silently discarding the packaged output.
    return get_js(), None

def compress_css(self, paths, output_filename, variant=None, **kwargs):
"""Concatenate and compress CSS files"""
Expand All @@ -73,9 +82,9 @@ def compress_css(self, paths, output_filename, variant=None, **kwargs):
if compressor:
css = getattr(compressor(verbose=self.verbose), 'compress_css')(css)
if not variant:
return css
return css, None
elif variant == "datauri":
return self.with_data_uri(css)
return self.with_data_uri(css), None
else:
raise CompressorError("\"%s\" is not a valid variant" % variant)

Expand Down
1 change: 1 addition & 0 deletions pipeline/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
'PIPELINE_CSS_COMPRESSOR': 'pipeline.compressors.yuglify.YuglifyCompressor',
'PIPELINE_JS_COMPRESSOR': 'pipeline.compressors.yuglify.YuglifyCompressor',
'PIPELINE_COMPILERS': [],
'PIPELINE_COMPILER_CONCURRENCY': None,

'PIPELINE_CSS': {},
'PIPELINE_JS': {},
Expand Down
20 changes: 16 additions & 4 deletions pipeline/packager.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from __future__ import unicode_literals
import os.path

from django.contrib.staticfiles.finders import find
from django.core.files.base import ContentFile
Expand Down Expand Up @@ -90,24 +91,35 @@ def individual_url(self, filename):

def pack_stylesheets(self, package, **kwargs):
    """Pack a CSS package, using the CSS source-map comment syntax."""
    return self.pack(
        package,
        self.compressor.compress_css,
        css_compressed,
        '/*# sourceMappingURL={} */',
        output_filename=package.output_filename,
        variant=package.variant,
        **kwargs)

def compile(self, paths, force=False):
return self.compiler.compile(paths, force=force)

def pack(self, package, compress, signal, source_mapping_template, **kwargs):
    """Compile, compress and save a package's assets.

    This is a generator: it yields the name of every file it writes
    (the ``.map`` file first when a source map was produced, then the
    packed output), so callers must iterate it for any work to happen.

    ``source_mapping_template`` is a format string (e.g.
    ``'//# sourceMappingURL={}'``) used to append the source-map
    reference to the packed content.
    """
    output_filename = package.output_filename
    if self.verbose:
        print("Saving: %s" % output_filename)
    paths = self.compile(package.paths, force=True)
    content, source_map = compress(paths, **kwargs)
    if source_map is not None:
        source_map_output_filename = output_filename + '.map'
        if self.verbose:
            print("Saving: %s" % source_map_output_filename)
        self.save_file(source_map_output_filename, source_map)
        # Reference the map by basename: it is saved alongside the
        # packed file, so a relative URL resolves correctly.
        content = content + '\n' + source_mapping_template.format(
            os.path.basename(source_map_output_filename))
        yield source_map_output_filename
    self.save_file(output_filename, content)
    signal.send(sender=self, package=package, **kwargs)
    yield output_filename

def pack_javascripts(self, package, **kwargs):
    """Pack a JS package, using the JS source-map comment syntax."""
    # The span contained both the old and the new form of this call;
    # keep only the new one, which supplies the source-map template.
    return self.pack(package, self.compressor.compress_js, js_compressed,
                     '//# sourceMappingURL={}',
                     templates=package.templates, **kwargs)

def pack_templates(self, package):
return self.compressor.compile_templates(package.templates)
Expand Down
37 changes: 29 additions & 8 deletions pipeline/storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,16 +29,21 @@ def post_process(self, paths, dry_run=False, **options):
package = packager.package_for('css', package_name)
output_file = package.output_filename
if self.packing:
packager.pack_stylesheets(package)
paths[output_file] = (self, output_file)
yield output_file, output_file, True
processor = packager.pack_stylesheets(package)
else:
processor = [package.output_filename]
for output_filename in processor:
paths[output_filename] = (self, output_filename)
yield output_filename, output_filename, True
for package_name in packager.packages['js']:
package = packager.package_for('js', package_name)
output_file = package.output_filename
if self.packing:
packager.pack_javascripts(package)
paths[output_file] = (self, output_file)
yield output_file, output_file, True
processor = packager.pack_javascripts(package)
else:
processor = [package.output_filename]
for output_filename in processor:
paths[output_filename] = (self, output_filename)
yield output_filename, output_filename, True

super_class = super(PipelineMixin, self)
if hasattr(super_class, 'post_process'):
Expand Down Expand Up @@ -99,7 +104,23 @@ class NonPackagingPipelineStorage(NonPackagingMixin, PipelineStorage):


class PipelineCachedStorage(PipelineMixin, CachedStaticFilesStorage):
pass
def url_converter(self, name, template=None):
    """
    Return the URL converter for ``name``.

    Wraps Django's converter so that ``about:`` URLs (e.g.
    ``about:blank``) pass through unchanged: they are not static
    files, so rewriting them to hashed names would fail.
    """
    django_converter = super(PipelineCachedStorage, self).url_converter(
        name, template=template)

    def converter(matchobj):
        matched, url = matchobj.groups()
        # FIX: the original wrote ``url.startswith(('about:'))`` --
        # the parentheses do NOT make a tuple, so only 'about:' was
        # ever checked -- and its comment wrongly claimed http(s),
        # fragment and data URIs were also skipped here.  Behavior is
        # unchanged; the code now says what it does.
        if url.startswith('about:'):
            return matched

        return django_converter(matchobj)

    return converter


class NonPackagingPipelineCachedStorage(NonPackagingMixin, PipelineCachedStorage):
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

setup(
name='django-pipeline',
version='1.3.24',
version='1.3.24-sourcemaps-rev3',
description='Pipeline is an asset packaging library for Django.',
long_description=io.open('README.rst', encoding='utf-8').read() + '\n\n' +
io.open('HISTORY.rst', encoding='utf-8').read(),
Expand Down
51 changes: 51 additions & 0 deletions tests/tests/test_compiler.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from __future__ import unicode_literals

from django.test import TestCase
from mock import MagicMock, patch

from pipeline.conf import settings
from pipeline.compilers import Compiler, CompilerBase
Expand Down Expand Up @@ -39,5 +40,55 @@ def test_compile(self):
])
self.assertEqual([_('pipeline/js/dummy.js'), _('pipeline/js/application.js')], list(paths))

def _get_mocked_concurrency_packages(self, mock_cpu_count=4):
multiprocessing_mock = MagicMock()
multiprocessing_mock.cpu_count.return_value = mock_cpu_count

concurrent_mock = MagicMock()
thread_pool_executor_mock = concurrent_mock.futures.ThreadPoolExecutor
thread_pool_executor_mock.return_value.__exit__.return_value = False

modules = {
'multiprocessing': multiprocessing_mock,
'concurrent': concurrent_mock,
'concurrent.futures': concurrent_mock.futures,
}
return modules, thread_pool_executor_mock

def test_concurrency_setting(self):
    '''
    Setting PIPELINE_COMPILER_CONCURRENCY should override the default
    CPU count.
    '''
    modules, thread_pool_executor_mock = (
        self._get_mocked_concurrency_packages())

    # FIX: save and restore the previous value in a try/finally.  The
    # original restored the setting only after the assertion, so a
    # failing assertion leaked the override into later tests, and it
    # hard-reset to None rather than the prior value.
    old_value = settings.PIPELINE_COMPILER_CONCURRENCY
    settings.PIPELINE_COMPILER_CONCURRENCY = 2
    try:
        with patch.dict('sys.modules', modules):
            self.compiler.compile([])

        thread_pool_executor_mock.assert_called_once_with(
            max_workers=2
        )
    finally:
        settings.PIPELINE_COMPILER_CONCURRENCY = old_value

def test_empty_concurrency_setting(self):
    '''
    Compiler should use cpu_count() if PIPELINE_COMPILER_CONCURRENCY is
    not set.
    '''
    expected_workers = 4
    modules, executor_mock = (
        self._get_mocked_concurrency_packages(expected_workers))

    with patch.dict('sys.modules', modules):
        self.compiler.compile([])

    # With no explicit setting, the pool size falls back to the
    # (mocked) CPU count.
    executor_mock.assert_called_once_with(max_workers=expected_workers)

def tearDown(self):
    # Restore the compiler list so later tests are unaffected.
    # NOTE(review): setUp is outside this view -- presumably it stashes
    # the original value in self.old_compilers; confirm.
    settings.PIPELINE_COMPILERS = self.old_compilers