author    Luca Barbato <lu_zero@gentoo.org>  2014-01-20 13:28:37 +0100
committer Anton Khirnov <anton@khirnov.net>  2014-02-04 22:01:42 +0100
commit    d9ae1031f5edbd25c8526b4cb51aba66d3bee931 (patch)
tree      8bf19706029f996334e900cb10e47ec97c1d1db7 /libavformat/options_table.h
parent    4c3e1956ee35fdcc5ffdb28782050164b4623c0b (diff)
lavf: improve handling of sparse streams when muxing
Currently ff_interleave_packet_per_dts() waits until it gets a frame for
each stream before outputting packets in interleaved order.

Sparse streams (i.e. streams with far fewer packets than the others,
such as subtitles or audio with DTX) tend to accumulate latency and, in
specific cases, end up allocating a large amount of memory.

Emit the top packet from the packet_buffer if it has a time delta
larger than a specified threshold.

Original report of the issue and initial proposed solution by
mus.svz@gmail.com.

Bug-id: 31
Signed-off-by: Anton Khirnov <anton@khirnov.net>
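The threshold added by this patch is exposed as the "max_interleave_delta"
option on AVFormatContext, in AV_TIME_BASE units (microseconds), with the
default of 10000000 (10 seconds) visible in the hunk below. As a minimal
sketch of how a muxing application might lower it to bound latency for
sparse streams; the helper name and the 1-second value are illustrative
assumptions, not part of this patch:

#include <libavformat/avformat.h>
#include <libavutil/opt.h>

/* Hypothetical helper: cap how long the muxer may buffer packets while
 * waiting for data on sparse streams. oc is an already-opened output
 * context; error handling is omitted for brevity. */
static void limit_interleave_delay(AVFormatContext *oc)
{
    /* max_interleave_delta is in AV_TIME_BASE units (microseconds);
     * the default from the option table below is 10000000 (10 s).
     * 1000000 (1 s) here is an illustrative choice. */
    av_opt_set_int(oc, "max_interleave_delta", 1000000, 0);

    /* Equivalently, the AVFormatContext field can be set directly:
     * oc->max_interleave_delta = 1000000; */
}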
Diffstat (limited to 'libavformat/options_table.h')
-rw-r--r--  libavformat/options_table.h | 1 +
1 file changed, 1 insertion, 0 deletions
diff --git a/libavformat/options_table.h b/libavformat/options_table.h
index 54bf25fc27..cc84e1c02f 100644
--- a/libavformat/options_table.h
+++ b/libavformat/options_table.h
@@ -58,6 +58,7 @@ static const AVOption avformat_options[] = {
{"bitstream", "detect bitstream specification deviations", 0, AV_OPT_TYPE_CONST, {.i64 = AV_EF_BITSTREAM }, INT_MIN, INT_MAX, D, "err_detect"},
{"buffer", "detect improper bitstream length", 0, AV_OPT_TYPE_CONST, {.i64 = AV_EF_BUFFER }, INT_MIN, INT_MAX, D, "err_detect"},
{"explode", "abort decoding on minor error detection", 0, AV_OPT_TYPE_CONST, {.i64 = AV_EF_EXPLODE }, INT_MIN, INT_MAX, D, "err_detect"},
+{"max_interleave_delta", "maximum buffering duration for interleaving", OFFSET(max_interleave_delta), AV_OPT_TYPE_INT64, { .i64 = 10000000 }, 0, INT64_MAX, E },
{NULL},
};