
Commit

Add sourcemaps support for uglifyjs and cleancss
fdintino committed Jan 22, 2016
1 parent ec660fe commit 6fc21a5
Showing 6 changed files with 163 additions and 23 deletions.
7 changes: 4 additions & 3 deletions pipeline/compressors/__init__.py
@@ -15,7 +15,7 @@

from pipeline.conf import settings
from pipeline.exceptions import CompressorError
from pipeline.utils import to_class, relpath
from pipeline.utils import to_class, relpath, set_std_streams_blocking

URL_DETECTOR = r"""url\((['"]){0,1}\s*(.*?)["']{0,1}\)"""
URL_REPLACER = r"""url\(__EMBED__(.+?)(\?\d+)?\)"""
@@ -253,7 +253,7 @@ def filter_js(self, js):


class SubProcessCompressor(CompressorBase):
    def execute_command(self, command, content):
    def execute_command(self, command, content=None, **kwargs):
        argument_list = []
        for flattening_arg in command:
            if isinstance(flattening_arg, string_types):
@@ -263,10 +263,11 @@ def execute_command(self, command, content):
        stdin = subprocess.PIPE if content else None

        pipe = subprocess.Popen(argument_list, stdout=subprocess.PIPE,
                                stdin=stdin, stderr=subprocess.PIPE)
                                stdin=stdin, stderr=subprocess.PIPE, **kwargs)
        if content:
            content = smart_bytes(content)
        stdout, stderr = pipe.communicate(content)
        set_std_streams_blocking()
        if stderr.strip() and pipe.returncode != 0:
            raise CompressorError(stderr)
        elif self.verbose:
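
A note on the execute_command() change above: content is now optional and any extra keyword arguments are forwarded to subprocess.Popen, so a compressor can run its tool against files on disk (passing, for example, cwd=...) instead of piping source through stdin. The set_std_streams_blocking() call after communicate() clears O_NONBLOCK on the parent's stdout/stderr, presumably because some Node-based tools leave those inherited descriptors in non-blocking mode. A minimal sketch of how a subclass might use the new signature; the class and the second method are hypothetical, not part of the commit:

from pipeline.conf import settings
from pipeline.compressors import SubProcessCompressor


class ExampleCompressor(SubProcessCompressor):
    # Hypothetical subclass, for illustration only.

    def compress_css(self, css):
        # stdin-based call, as before: the source text is piped to the tool.
        return self.execute_command([settings.CLEANCSS_BINARY], css)

    def compress_css_files(self, paths, output_path, output_dir):
        # File-based call: no stdin content, plus a Popen keyword argument
        # (cwd) that execute_command() now passes through.
        args = [settings.CLEANCSS_BINARY, '--output', output_path] + list(paths)
        return self.execute_command(args, cwd=output_dir)
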
14 changes: 1 addition & 13 deletions pipeline/compressors/closure.py
@@ -8,19 +8,7 @@

from pipeline.conf import settings
from pipeline.compressors import SubProcessCompressor


source_map_re = re.compile((
    "(?:"
    "/\\*"
    "(?:\\s*\r?\n(?://)?)?"
    "(?:%(inner)s)"
    "\\s*"
    "\\*/"
    "|"
    "//(?:%(inner)s)"
    ")"
    "\\s*$") % {'inner': r"""[#@] sourceMappingURL=([^\s'"]*)"""})
from pipeline.utils import source_map_re


class ClosureCompressor(SubProcessCompressor):
66 changes: 66 additions & 0 deletions pipeline/compressors/cssclean.py
@@ -0,0 +1,66 @@
from __future__ import unicode_literals

import codecs
import json
import os

from django.contrib.staticfiles.storage import staticfiles_storage

from pipeline.conf import settings
from pipeline.compressors import SubProcessCompressor
from pipeline.utils import source_map_re, relurl


class CleanCSSCompressor(SubProcessCompressor):

    def compress_css(self, css):
        args = [settings.CLEANCSS_BINARY, settings.CLEANCSS_ARGUMENTS]
        return self.execute_command(args, css)

    def compress_css_with_source_map(self, paths, output_filename):
        output_path = staticfiles_storage.path(output_filename)
        output_dir = os.path.dirname(output_path)
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)

        args = [settings.CLEANCSS_BINARY]
        args += ['--source-map']
        if settings.CLEANCSS_ARGUMENTS:
            args += [settings.CLEANCSS_ARGUMENTS]
        else:
            # At present, without these arguments, cleancss does not
            # generate accurate source maps
            args += [
                '--skip-advanced', '--skip-media-merging',
                '--skip-restructuring', '--skip-shorthand-compacting',
                '--keep-line-breaks']
        args += ['--output', output_path]
        args += [staticfiles_storage.path(p) for p in paths]

        self.execute_command(args, cwd=output_dir)

        source_map_file = "%s.map" % output_path

        with codecs.open(output_path, encoding='utf-8') as f:
            css = f.read()
        with codecs.open(source_map_file, encoding='utf-8') as f:
            source_map = f.read()

        # Strip out existing source map comment (it will be re-added with packaging)
        css = source_map_re.sub('', css)

        output_url = "%s/%s" % (
            staticfiles_storage.url(os.path.dirname(output_filename)),
            os.path.basename(output_path))

        # Grab urls from staticfiles storage (in case filenames are hashed)
        source_map_data = json.loads(source_map)
        for i, source in enumerate(source_map_data['sources']):
            source_abs_path = os.path.join(output_dir, source)
            source_rel_path = os.path.relpath(
                source_abs_path, staticfiles_storage.base_location)
            source_url = staticfiles_storage.url(source_rel_path)
            source_map_data['sources'][i] = relurl(source_url, output_url)
        source_map = json.dumps(source_map_data, indent="\t")

        return css, source_map
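
In short, compress_css_with_source_map() has cleancss write the combined file itself (via --output), reads the CSS and the generated .map back in, strips any sourceMappingURL comment (the packager re-adds one), and rewrites each entry in the map's "sources" list as a URL relative to the output file. A hedged configuration sketch, not part of the commit, showing how the compressor could be selected with pipeline's dict-style settings; the dotted path follows the module and class added above, and the binary value assumes the clean-css CLI is on PATH as cleancss (the shipped default is '/usr/bin/env cssclean'):

# settings.py (illustrative)
PIPELINE = {
    'CSS_COMPRESSOR': 'pipeline.compressors.cssclean.CleanCSSCompressor',
    # Defaults are added in pipeline/conf.py below; override as needed.
    'CLEANCSS_BINARY': '/usr/bin/env cleancss',
    'CLEANCSS_ARGUMENTS': '',
}
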
37 changes: 35 additions & 2 deletions pipeline/compressors/uglifyjs.py
@@ -1,12 +1,45 @@
from __future__ import unicode_literals

import codecs
import tempfile

from django.contrib.staticfiles.storage import staticfiles_storage

from pipeline.conf import settings
from pipeline.compressors import SubProcessCompressor
from pipeline.utils import source_map_re, path_depth


class UglifyJSCompressor(SubProcessCompressor):

    def compress_js(self, js):
        command = (settings.UGLIFYJS_BINARY, settings.UGLIFYJS_ARGUMENTS)
        command = [settings.UGLIFYJS_BINARY, settings.UGLIFYJS_ARGUMENTS]
        if self.verbose:
            command += ' --verbose'
            command.append(' --verbose')
        return self.execute_command(command, js)

    def compress_js_with_source_map(self, paths):
        source_map_file = tempfile.NamedTemporaryFile()

        args = [settings.UGLIFYJS_BINARY]
        args += [staticfiles_storage.path(p) for p in paths]
        args += ["--source-map", source_map_file.name]
        args += ["--source-map-root", staticfiles_storage.base_url]
        args += ["--prefix", "%s" % path_depth(staticfiles_storage.base_location)]

        args += settings.UGLIFYJS_ARGUMENTS

        if self.verbose:
            args.append('--verbose')

        js = self.execute_command(args)

        with codecs.open(source_map_file.name, encoding='utf-8') as f:
            source_map = f.read()

        source_map_file.close()

        # Strip out existing source map comment (it will be re-added with packaging)
        js = source_map_re.sub('', js)

        return js, source_map
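
For reference, compress_js_with_source_map() hands uglifyjs the original source files rather than piping concatenated output, writes the map to a temporary file, and strips any existing sourceMappingURL comment from the minified result (the packager re-adds one). The flags match the uglifyjs 2.x-era CLI. An illustrative sketch of the argument list the method builds, using invented paths and the default (empty) UGLIFYJS_ARGUMENTS:

# Illustrative only; none of these paths appear in the commit.
# Assumes STATIC_ROOT == '/srv/proj/static' and STATIC_URL == '/static/'.
args = [
    '/usr/bin/env uglifyjs',            # settings.UGLIFYJS_BINARY
    '/srv/proj/static/js/app.js',       # staticfiles_storage.path() per input
    '/srv/proj/static/js/vendor.js',
    '--source-map', '/tmp/tmpXXXXXX',   # the NamedTemporaryFile for the map
    '--source-map-root', '/static/',    # staticfiles_storage.base_url
    '--prefix', '3',                    # path_depth(staticfiles_storage.base_location)
]
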
3 changes: 3 additions & 0 deletions pipeline/conf.py
@@ -52,6 +52,9 @@
    'CSSMIN_BINARY': '/usr/bin/env cssmin',
    'CSSMIN_ARGUMENTS': '',

    'CLEANCSS_BINARY': '/usr/bin/env cssclean',
    'CLEANCSS_ARGUMENTS': '',

    'COFFEE_SCRIPT_BINARY': '/usr/bin/env coffee',
    'COFFEE_SCRIPT_ARGUMENTS': '',

59 changes: 54 additions & 5 deletions pipeline/utils.py
@@ -1,19 +1,36 @@
from __future__ import unicode_literals

try:
    import fcntl
except ImportError:
    # windows
    fcntl = None
import importlib
import mimetypes
import posixpath

try:
    from urllib.parse import quote
except ImportError:
    from urllib import quote
import os
import re
import sys

from django.utils.encoding import smart_text
from django.utils.six.moves.urllib.parse import urlparse, quote

from pipeline.conf import settings


source_map_re = re.compile((
    "(?:"
    "/\\*"
    "(?:\\s*\r?\n(?://)?)?"
    "(?:%(inner)s)"
    "\\s*"
    "\\*/"
    "|"
    "//(?:%(inner)s)"
    ")"
    "\\s*$") % {'inner': r"""[#@] sourceMappingURL=([^\s'"]*)"""})


def to_class(class_str):
    if not class_str:
        return None
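
The source_map_re pattern that closure.py previously defined now lives here so the new compressors can share it. It matches a trailing sourceMappingURL comment in either the block form used for CSS (/*# ... */) or the line form used for JS (//# ...), accepts the older @ marker, and only fires when the comment sits at the end of the content. A small illustrative check, not part of the diff:

from pipeline.utils import source_map_re

js = "console.log('hi');\n//# sourceMappingURL=app.js.map\n"
css = "a{color:red}\n/*# sourceMappingURL=screen.css.map */\n"

# sub() drops the trailing comment; the packager re-adds one later.
assert source_map_re.sub('', js) == "console.log('hi');\n"
assert source_map_re.sub('', css) == "a{color:red}\n"

# The URL lands in group 1 for the /* */ form and group 2 for the // form.
assert source_map_re.search(css).group(1) == 'screen.css.map'
assert source_map_re.search(js).group(2) == 'app.js.map'
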
@@ -54,3 +71,35 @@ def relpath(path, start=posixpath.curdir):
    if not rel_list:
        return posixpath.curdir
    return posixpath.join(*rel_list)


def relurl(path, start):
    base = urlparse(start)
    target = urlparse(path)
    if base.netloc != target.netloc:
        raise ValueError('target and base netlocs do not match')
    base_dir = '.' + posixpath.dirname(base.path)
    target = '.' + target.path
    return posixpath.relpath(target, start=base_dir)
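
A quick illustrative check of relurl(), with invented values: the result is path expressed relative to the directory containing start, which is how cssclean.py rewrites each entry of the source map's "sources" list.

from pipeline.utils import relurl

assert relurl('/static/css/screen.css',
              '/static/css/build/screen.min.css') == '../screen.css'
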


def set_std_streams_blocking():
    if not fcntl:
        return
    for f in (sys.__stdout__, sys.__stderr__):
        fileno = f.fileno()
        flags = fcntl.fcntl(fileno, fcntl.F_GETFL)
        fcntl.fcntl(fileno, fcntl.F_SETFL, flags & ~os.O_NONBLOCK)


def path_depth(path):
    """Cross-platform compatible path depth count"""
    import os
    if hasattr(os.path, 'splitunc'):
        _, path = os.path.splitunc(path)
    parent = os.path.dirname(path)
    count = 0
    while path != parent:
        path, parent = parent, os.path.dirname(parent)
        count += 1
    return count
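
And an illustrative check of path_depth(), whose count uglifyjs.py passes to --prefix, presumably so the STATIC_ROOT portion is dropped from the file names recorded in the map (example path invented):

from pipeline.utils import path_depth

# On a POSIX system: three directory components (srv, proj, static).
assert path_depth('/srv/proj/static') == 3
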
