diff --git a/av/filter/__init__.py b/av/filter/__init__.py
index 14c8563ce..467307baa 100644
--- a/av/filter/__init__.py
+++ b/av/filter/__init__.py
@@ -1,3 +1,4 @@
 from .filter import Filter, filter_descriptor, filters_available
 from .graph import Graph as Graph
+from .graph import ThreadType as ThreadType
 from .loudnorm import stats as stats
diff --git a/av/filter/graph.py b/av/filter/graph.py
index 5cd3bbf06..4fccfaddc 100644
--- a/av/filter/graph.py
+++ b/av/filter/graph.py
@@ -1,4 +1,5 @@
 import warnings
+from enum import Flag
 from fractions import Fraction
 
 import cython
@@ -12,6 +13,19 @@
 from cython.cimports.av.video.frame import VideoFrame
 
 
+class ThreadType(Flag):
+    """Threading types for filter graphs.
+
+    Unlike codec threading, filter graphs only support slice-based threading
+    via :ffmpeg:`AVFILTER_THREAD_SLICE`.
+    """
+
+    NONE = 0
+    SLICE: "Process multiple parts of the frame concurrently" = (
+        lib.AVFILTER_THREAD_SLICE
+    )
+
+
 @cython.cclass
 class Graph:
     def __cinit__(self):
@@ -27,6 +41,45 @@ def __dealloc__(self):
         # This frees the graph, filter contexts, links, etc..
         lib.avfilter_graph_free(cython.address(self.ptr))
 
+    @property
+    def nb_threads(self):
+        """Maximum number of threads used by filters in this graph.
+
+        Set to 0 for automatic thread count. Must be set before adding any
+        filters to the graph.
+
+        Wraps :ffmpeg:`AVFilterGraph.nb_threads`.
+        """
+        return self.ptr.nb_threads
+
+    @nb_threads.setter
+    def nb_threads(self, value: cython.int):
+        if self.ptr.nb_filters:
+            raise RuntimeError(
+                "Cannot change nb_threads after filters have been added."
+            )
+        self.ptr.nb_threads = value
+
+    @property
+    def thread_type(self):
+        """One of :class:`.ThreadType`.
+
+        May be set at any point. The setting will apply to all filters
+        initialized after that.
+
+        Wraps :ffmpeg:`AVFilterGraph.thread_type`.
+        """
+        return ThreadType(self.ptr.thread_type)
+
+    @thread_type.setter
+    def thread_type(self, value):
+        if type(value) is int:
+            self.ptr.thread_type = value
+        elif type(value) is str:
+            self.ptr.thread_type = ThreadType[value].value
+        else:
+            self.ptr.thread_type = value.value
+
     @cython.cfunc
     def _get_unique_name(self, name: str) -> str:
         count = self._name_counts.get(name, 0)
diff --git a/av/filter/graph.pyi b/av/filter/graph.pyi
index e170c2ce7..4d405babd 100644
--- a/av/filter/graph.pyi
+++ b/av/filter/graph.pyi
@@ -1,5 +1,6 @@
+from enum import Flag
 from fractions import Fraction
-from typing import Any
+from typing import Any, ClassVar, cast
 
 from av.audio.format import AudioFormat
 from av.audio.frame import AudioFrame
@@ -12,8 +13,16 @@ from av.video.stream import VideoStream
 
 from .context import FilterContext
 from .filter import Filter
 
+class ThreadType(Flag):
+    NONE = cast(ClassVar[ThreadType], ...)
+    SLICE = cast(ClassVar[ThreadType], ...)
+    def __get__(self, i: object | None, owner: type | None = None) -> ThreadType: ...
+    def __set__(self, instance: object, value: int | str | ThreadType) -> None: ...
+
 class Graph:
     configured: bool
+    nb_threads: int
+    thread_type: ThreadType
     def __init__(self) -> None: ...
     def configure(self, auto_buffer: bool = True, force: bool = False) -> None: ...
diff --git a/include/avfilter.pxd b/include/avfilter.pxd
index 5eb11ecc2..1a493ef25 100644
--- a/include/avfilter.pxd
+++ b/include/avfilter.pxd
@@ -11,6 +11,9 @@ cdef extern from "libavfilter/avfilter.h" nogil:
 
     cdef unsigned avfilter_filter_pad_count(const AVFilter *filter, int is_output)
 
+    cdef enum:
+        AVFILTER_THREAD_SLICE
+
     cdef struct AVFilter:
         const char *name
        const char *description
@@ -58,6 +61,8 @@
     cdef struct AVFilterGraph:
         int nb_filters
         AVFilterContext **filters
+        int thread_type
+        int nb_threads
 
     cdef struct AVFilterInOut:
         char *name
diff --git a/tests/test_filters.py b/tests/test_filters.py
index bd74a633a..5cf7fc146 100644
--- a/tests/test_filters.py
+++ b/tests/test_filters.py
@@ -6,7 +6,7 @@
 import av
 from av import AudioFrame, VideoFrame
 from av.audio.frame import format_dtypes
-from av.filter import Filter, Graph
+from av.filter import Filter, Graph, ThreadType
 
 from .common import TestCase, has_pillow
 
@@ -193,11 +193,10 @@ def test_audio_buffer_volume_filter(self):
 
         assert np.allclose(input_data * 0.5, output_data)
 
-    def test_video_buffer(self):
+    def _test_video_buffer(self, graph):
         input_container = av.open(format="lavfi", file="color=c=pink:duration=1:r=30")
         input_video_stream = input_container.streams.video[0]
 
-        graph = av.filter.Graph()
         buffer = graph.add_buffer(template=input_video_stream)
         bwdif = graph.add("bwdif", "send_field:tff:all")
         buffersink = graph.add("buffersink")
@@ -223,6 +222,15 @@
         assert filtered_frames[1].pts == (frame.pts - 1) * 2 + 1
         assert filtered_frames[1].time_base == Fraction(1, 60)
 
+    def test_video_buffer(self):
+        self._test_video_buffer(av.filter.Graph())
+
+    def test_video_buffer_threading(self):
+        graph = av.filter.Graph()
+        graph.nb_threads = 4
+        graph.thread_type = ThreadType.SLICE
+        self._test_video_buffer(graph)
+
     def test_EOF(self) -> None:
         input_container = av.open(format="lavfi", file="color=c=pink:duration=1:r=30")
         video_stream = input_container.streams.video[0]
@@ -246,3 +254,30 @@ def test_EOF(self) -> None:
         assert isinstance(palette_frame, av.VideoFrame)
         assert palette_frame.width == 16
         assert palette_frame.height == 16
+
+    def test_graph_nb_threads(self) -> None:
+        graph = Graph()
+        assert graph.nb_threads == 0
+
+        graph.nb_threads = 4
+        assert graph.nb_threads == 4
+
+        graph.add("testsrc")
+
+        with self.assertRaises(RuntimeError):
+            graph.nb_threads = 2
+
+    def test_graph_thread_type(self) -> None:
+        graph = Graph()
+        assert graph.thread_type == ThreadType.SLICE
+
+        graph.thread_type = ThreadType.NONE
+        assert graph.thread_type == ThreadType.NONE
+
+        # thread_type can be set at any point, even after configuring
+        src = graph.add("testsrc")
+        src.link_to(graph.add("buffersink"))
+        graph.configure()
+
+        graph.thread_type = ThreadType.SLICE
+        assert graph.thread_type == ThreadType.SLICE