Skip to content

Commit

Permalink
Re-apply Black formatting, and wrap docstrings at ~88 columns. (#639)
Browse files Browse the repository at this point in the history
  • Loading branch information
kkroening authored Mar 6, 2022
1 parent f307972 commit fd1da13
Show file tree
Hide file tree
Showing 9 changed files with 340 additions and 223 deletions.
6 changes: 2 additions & 4 deletions ffmpeg/_ffmpeg.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,7 @@ def input(filename, **kwargs):

@output_operator()
def global_args(stream, *args):
"""Add extra global command-line argument(s), e.g. ``-progress``.
"""
"""Add extra global command-line argument(s), e.g. ``-progress``."""
return GlobalNode(stream, global_args.__name__, args).stream()


Expand All @@ -50,8 +49,7 @@ def overwrite_output(stream):

@output_operator()
def merge_outputs(*streams):
"""Include all given outputs in one ffmpeg command line
"""
"""Include all given outputs in one ffmpeg command line"""
return MergeOutputsNode(streams, merge_outputs.__name__).stream()


Expand Down
355 changes: 202 additions & 153 deletions ffmpeg/_filters.py

Large diffs are not rendered by default.

25 changes: 17 additions & 8 deletions ffmpeg/_run.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,8 +88,8 @@ def _allocate_filter_stream_names(filter_nodes, outgoing_edge_maps, stream_name_
if len(downstreams) > 1:
# TODO: automatically insert `splits` ahead of time via graph transformation.
raise ValueError(
'Encountered {} with multiple outgoing edges with same upstream label {!r}; a '
'`split` filter is probably required'.format(
'Encountered {} with multiple outgoing edges with same upstream '
'label {!r}; a `split` filter is probably required'.format(
upstream_node, upstream_label
)
)
Expand Down Expand Up @@ -199,7 +199,7 @@ def run_async(
pipe_stderr=False,
quiet=False,
overwrite_output=False,
cwd=None
cwd=None,
):
"""Asynchronously invoke ffmpeg for the supplied node graph.
Expand Down Expand Up @@ -286,8 +286,11 @@ def run_async(
stderr_stream = subprocess.STDOUT
stdout_stream = subprocess.DEVNULL
return subprocess.Popen(
args, stdin=stdin_stream, stdout=stdout_stream, stderr=stderr_stream,
cwd=cwd
args,
stdin=stdin_stream,
stdout=stdout_stream,
stderr=stderr_stream,
cwd=cwd,
)


Expand All @@ -300,7 +303,7 @@ def run(
input=None,
quiet=False,
overwrite_output=False,
cwd=None
cwd=None,
):
"""Invoke ffmpeg for the supplied node graph.
Expand All @@ -324,7 +327,7 @@ def run(
pipe_stderr=capture_stderr,
quiet=quiet,
overwrite_output=overwrite_output,
cwd=cwd
cwd=cwd,
)
out, err = process.communicate(input)
retcode = process.poll()
Expand All @@ -333,4 +336,10 @@ def run(
return out, err


__all__ = ['compile', 'Error', 'get_args', 'run', 'run_async']
__all__ = [
'compile',
'Error',
'get_args',
'run',
'run_async',
]
4 changes: 2 additions & 2 deletions ffmpeg/_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,8 +49,8 @@ class basestring(with_metaclass(BaseBaseString)):
def _recursive_repr(item):
"""Hack around python `repr` to deterministically represent dictionaries.
This is able to represent more things than json.dumps, since it does not require things to be JSON serializable
(e.g. datetimes).
This is able to represent more things than json.dumps, since it does not require
things to be JSON serializable (e.g. datetimes).
"""
if isinstance(item, basestring):
result = str(item)
Expand Down
4 changes: 2 additions & 2 deletions ffmpeg/_view.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,8 +35,8 @@ def view(stream_spec, detail=False, filename=None, pipe=False, **kwargs):
import graphviz
except ImportError:
raise ImportError(
'failed to import graphviz; please make sure graphviz is installed (e.g. `pip install '
'graphviz`)'
'failed to import graphviz; please make sure graphviz is installed (e.g. '
'`pip install graphviz`)'
)

show_labels = kwargs.pop('show_labels', True)
Expand Down
47 changes: 28 additions & 19 deletions ffmpeg/dag.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,46 +9,55 @@ class DagNode(object):
"""Node in a directed-acyclic graph (DAG).
Edges:
DagNodes are connected by edges. An edge connects two nodes with a label for each side:
DagNodes are connected by edges. An edge connects two nodes with a label for
each side:
- ``upstream_node``: upstream/parent node
- ``upstream_label``: label on the outgoing side of the upstream node
- ``downstream_node``: downstream/child node
- ``downstream_label``: label on the incoming side of the downstream node
For example, DagNode A may be connected to DagNode B with an edge labelled "foo" on A's side, and "bar" on B's
side:
For example, DagNode A may be connected to DagNode B with an edge labelled
"foo" on A's side, and "bar" on B's side:
_____ _____
| | | |
| A >[foo]---[bar]> B |
|_____| |_____|
Edge labels may be integers or strings, and nodes cannot have more than one incoming edge with the same label.
Edge labels may be integers or strings, and nodes cannot have more than one
incoming edge with the same label.
DagNodes may have any number of incoming edges and any number of outgoing edges. DagNodes keep track only of
their incoming edges, but the entire graph structure can be inferred by looking at the furthest downstream
nodes and working backwards.
DagNodes may have any number of incoming edges and any number of outgoing
edges. DagNodes keep track only of their incoming edges, but the entire graph
structure can be inferred by looking at the furthest downstream nodes and
working backwards.
Hashing:
DagNodes must be hashable, and two nodes are considered to be equivalent if they have the same hash value.
DagNodes must be hashable, and two nodes are considered to be equivalent if
they have the same hash value.
Nodes are immutable, and the hash should remain constant as a result. If a node with new contents is required,
create a new node and throw the old one away.
Nodes are immutable, and the hash should remain constant as a result. If a
node with new contents is required, create a new node and throw the old one
away.
String representation:
In order for graph visualization tools to show useful information, nodes must be representable as strings. The
``repr`` operator should provide a more or less "full" representation of the node, and the ``short_repr``
property should be a shortened, concise representation.
In order for graph visualization tools to show useful information, nodes must
be representable as strings. The ``repr`` operator should provide a more or
less "full" representation of the node, and the ``short_repr`` property should
be a shortened, concise representation.
Again, because nodes are immutable, the string representations should remain constant.
Again, because nodes are immutable, the string representations should remain
constant.
"""

def __hash__(self):
"""Return an integer hash of the node."""
raise NotImplementedError()

def __eq__(self, other):
"""Compare two nodes; implementations should return True if (and only if) hashes match."""
"""Compare two nodes; implementations should return True if (and only if)
hashes match.
"""
raise NotImplementedError()

def __repr__(self, other):
Expand All @@ -64,8 +73,9 @@ def short_repr(self):
def incoming_edge_map(self):
"""Provides information about all incoming edges that connect to this node.
The edge map is a dictionary that maps an ``incoming_label`` to ``(outgoing_node, outgoing_label)``. Note that
implicitly, ``incoming_node`` is ``self``. See "Edges" section above.
The edge map is a dictionary that maps an ``incoming_label`` to
``(outgoing_node, outgoing_label)``. Note that implicitly, ``incoming_node`` is
``self``. See "Edges" section above.
"""
raise NotImplementedError()

Expand Down Expand Up @@ -116,8 +126,7 @@ def get_outgoing_edges(upstream_node, outgoing_edge_map):


class KwargReprNode(DagNode):
"""A DagNode that can be represented as a set of args+kwargs.
"""
"""A DagNode that can be represented as a set of args+kwargs."""

@property
def __upstream_hashes(self):
Expand Down
11 changes: 7 additions & 4 deletions ffmpeg/nodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,9 @@ def _get_types_str(types):


class Stream(object):
"""Represents the outgoing edge of an upstream node; may be used to create more downstream nodes."""
"""Represents the outgoing edge of an upstream node; may be used to create more
downstream nodes.
"""

def __init__(
self, upstream_node, upstream_label, node_types, upstream_selector=None
Expand Down Expand Up @@ -214,9 +216,10 @@ def stream(self, label=None, selector=None):
return self.__outgoing_stream_type(self, label, upstream_selector=selector)

def __getitem__(self, item):
"""Create an outgoing stream originating from this node; syntactic sugar for ``self.stream(label)``.
It can also be used to apply a selector: e.g. ``node[0:'a']`` returns a stream with label 0 and
selector ``'a'``, which is the same as ``node.stream(label=0, selector='a')``.
"""Create an outgoing stream originating from this node; syntactic sugar for
``self.stream(label)``. It can also be used to apply a selector: e.g.
``node[0:'a']`` returns a stream with label 0 and selector ``'a'``, which is
the same as ``node.stream(label=0, selector='a')``.
Example:
Process the audio and video portions of a stream independently::
Expand Down
96 changes: 65 additions & 31 deletions ffmpeg/tests/test_ffmpeg.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,9 +116,20 @@ def test_stream_repr():
dummy_out.label, dummy_out.node.short_hash
)


def test_repeated_args():
out_file = ffmpeg.input('dummy.mp4').output('dummy2.mp4', streamid=['0:0x101', '1:0x102'])
assert out_file.get_args() == ['-i', 'dummy.mp4', '-streamid', '0:0x101', '-streamid', '1:0x102', 'dummy2.mp4']
out_file = ffmpeg.input('dummy.mp4').output(
'dummy2.mp4', streamid=['0:0x101', '1:0x102']
)
assert out_file.get_args() == [
'-i',
'dummy.mp4',
'-streamid',
'0:0x101',
'-streamid',
'1:0x102',
'dummy2.mp4',
]


def test__get_args__simple():
Expand Down Expand Up @@ -332,8 +343,13 @@ def test_filter_asplit():
'-i',
TEST_INPUT_FILE1,
'-filter_complex',
'[0]vflip[s0];[s0]asplit=2[s1][s2];[s1]atrim=end=20:start=10[s3];[s2]atrim=end=40:start=30[s4];[s3]'
'[s4]concat=n=2[s5]',
(
'[0]vflip[s0];'
'[s0]asplit=2[s1][s2];'
'[s1]atrim=end=20:start=10[s3];'
'[s2]atrim=end=40:start=30[s4];'
'[s3][s4]concat=n=2[s5]'
),
'-map',
'[s5]',
TEST_OUTPUT_FILE1,
Expand All @@ -357,10 +373,14 @@ def test__output__video_size(video_size):


def test_filter_normal_arg_escape():
"""Test string escaping of normal filter args (e.g. ``font`` param of ``drawtext`` filter)."""
"""Test string escaping of normal filter args (e.g. ``font`` param of ``drawtext``
filter).
"""

def _get_drawtext_font_repr(font):
"""Build a command-line arg using drawtext ``font`` param and extract the ``-filter_complex`` arg."""
"""Build a command-line arg using drawtext ``font`` param and extract the
``-filter_complex`` arg.
"""
args = (
ffmpeg.input('in')
.drawtext('test', font='a{}b'.format(font))
Expand All @@ -370,7 +390,9 @@ def _get_drawtext_font_repr(font):
assert args[:3] == ['-i', 'in', '-filter_complex']
assert args[4:] == ['-map', '[s0]', 'out']
match = re.match(
r'\[0\]drawtext=font=a((.|\n)*)b:text=test\[s0\]', args[3], re.MULTILINE
r'\[0\]drawtext=font=a((.|\n)*)b:text=test\[s0\]',
args[3],
re.MULTILINE,
)
assert match is not None, 'Invalid -filter_complex arg: {!r}'.format(args[3])
return match.group(1)
Expand All @@ -394,10 +416,14 @@ def _get_drawtext_font_repr(font):


def test_filter_text_arg_str_escape():
"""Test string escaping of normal filter args (e.g. ``text`` param of ``drawtext`` filter)."""
"""Test string escaping of normal filter args (e.g. ``text`` param of ``drawtext``
filter).
"""

def _get_drawtext_text_repr(text):
"""Build a command-line arg using drawtext ``text`` param and extract the ``-filter_complex`` arg."""
"""Build a command-line arg using drawtext ``text`` param and extract the
``-filter_complex`` arg.
"""
args = ffmpeg.input('in').drawtext('a{}b'.format(text)).output('out').get_args()
assert args[:3] == ['-i', 'in', '-filter_complex']
assert args[4:] == ['-map', '[s0]', 'out']
Expand Down Expand Up @@ -447,8 +473,11 @@ def test__run_async(mocker, pipe_stdin, pipe_stdout, pipe_stderr, cwd):
popen__mock = mocker.patch.object(subprocess, 'Popen', return_value=process__mock)
stream = _get_simple_example()
process = ffmpeg.run_async(
stream, pipe_stdin=pipe_stdin, pipe_stdout=pipe_stdout,
pipe_stderr=pipe_stderr, cwd=cwd
stream,
pipe_stdin=pipe_stdin,
pipe_stdout=pipe_stdout,
pipe_stderr=pipe_stderr,
cwd=cwd,
)
assert process is process__mock

Expand All @@ -458,8 +487,10 @@ def test__run_async(mocker, pipe_stdin, pipe_stdout, pipe_stderr, cwd):
(args,), kwargs = popen__mock.call_args
assert args == ffmpeg.compile(stream)
assert kwargs == dict(
stdin=expected_stdin, stdout=expected_stdout, stderr=expected_stderr,
cwd=cwd
stdin=expected_stdin,
stdout=expected_stdout,
stderr=expected_stderr,
cwd=cwd,
)


Expand Down Expand Up @@ -695,7 +726,10 @@ def test_pipe():

cmd = ['ffmpeg'] + args
p = subprocess.Popen(
cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)

in_data = bytes(
Expand All @@ -715,10 +749,10 @@ def test__probe():
assert data['format']['duration'] == '7.036000'


@pytest.mark.skipif(sys.version_info < (3, 3), reason="requires python3.3 or higher")
@pytest.mark.skipif(sys.version_info < (3, 3), reason='requires python3.3 or higher')
def test__probe_timeout():
with pytest.raises(subprocess.TimeoutExpired) as excinfo:
data = ffmpeg.probe(TEST_INPUT_FILE1, timeout=0)
ffmpeg.probe(TEST_INPUT_FILE1, timeout=0)
assert 'timed out after 0 seconds' in str(excinfo.value)


Expand Down Expand Up @@ -751,24 +785,24 @@ def get_filter_complex_outputs(flt, name):


def test__get_filter_complex_input():
assert get_filter_complex_input("", "scale") is None
assert get_filter_complex_input("scale", "scale") is None
assert get_filter_complex_input("scale[s3][s4];etc", "scale") is None
assert get_filter_complex_input("[s2]scale", "scale") == "s2"
assert get_filter_complex_input("[s2]scale;etc", "scale") == "s2"
assert get_filter_complex_input("[s2]scale[s3][s4];etc", "scale") == "s2"
assert get_filter_complex_input('', 'scale') is None
assert get_filter_complex_input('scale', 'scale') is None
assert get_filter_complex_input('scale[s3][s4];etc', 'scale') is None
assert get_filter_complex_input('[s2]scale', 'scale') == 's2'
assert get_filter_complex_input('[s2]scale;etc', 'scale') == 's2'
assert get_filter_complex_input('[s2]scale[s3][s4];etc', 'scale') == 's2'


def test__get_filter_complex_outputs():
assert get_filter_complex_outputs("", "scale") is None
assert get_filter_complex_outputs("scale", "scale") is None
assert get_filter_complex_outputs("scalex[s0][s1]", "scale") is None
assert get_filter_complex_outputs("scale[s0][s1]", "scale") == ['s0', 's1']
assert get_filter_complex_outputs("[s5]scale[s0][s1]", "scale") == ['s0', 's1']
assert get_filter_complex_outputs("[s5]scale[s1][s0]", "scale") == ['s1', 's0']
assert get_filter_complex_outputs("[s5]scale[s1]", "scale") == ['s1']
assert get_filter_complex_outputs("[s5]scale[s1];x", "scale") == ['s1']
assert get_filter_complex_outputs("y;[s5]scale[s1];x", "scale") == ['s1']
assert get_filter_complex_outputs('', 'scale') is None
assert get_filter_complex_outputs('scale', 'scale') is None
assert get_filter_complex_outputs('scalex[s0][s1]', 'scale') is None
assert get_filter_complex_outputs('scale[s0][s1]', 'scale') == ['s0', 's1']
assert get_filter_complex_outputs('[s5]scale[s0][s1]', 'scale') == ['s0', 's1']
assert get_filter_complex_outputs('[s5]scale[s1][s0]', 'scale') == ['s1', 's0']
assert get_filter_complex_outputs('[s5]scale[s1]', 'scale') == ['s1']
assert get_filter_complex_outputs('[s5]scale[s1];x', 'scale') == ['s1']
assert get_filter_complex_outputs('y;[s5]scale[s1];x', 'scale') == ['s1']


def test__multi_output_edge_label_order():
Expand Down
Loading

0 comments on commit fd1da13

Please sign in to comment.