MrBesen 2019-10-10 16:34:08 +02:00
commit 3267eae9b9
68 changed files with 629 additions and 354 deletions

View File

@ -688,6 +688,10 @@ Unused delays will be silently ignored. If number of given delays is
smaller than number of channels all remaining channels will not be delayed.
If you want to delay exact number of samples, append 'S' to number.
If you want instead to delay in seconds, append 's' to number.
@item all
Use last set delay for all remaining channels. By default this is disabled.
If enabled, this option changes how the option @code{delays} is interpreted.
@end table
@subsection Examples
@ -706,6 +710,12 @@ the first channel (and any other channels that may be present) unchanged.
@example
adelay=0|500S|700S
@end example
@item
Delay all channels by the same number of samples:
@example
adelay=delays=64S:all=1
@end example
@end itemize
@section aderivative, aintegral
@ -1143,6 +1153,18 @@ Leave almost only low frequencies in audio:
@example
afftfilt="'real=re * (1-clip((b/nb)*b,0,1))':imag='im * (1-clip((b/nb)*b,0,1))'"
@end example
@item
Apply robotize effect:
@example
afftfilt="real='hypot(re,im)*sin(0)':imag='hypot(re,im)*cos(0)':win_size=512:overlap=0.75"
@end example
@item
Apply whisper effect:
@example
afftfilt="real='hypot(re,im)*cos((random(0)*2-1)*2*3.14)':imag='hypot(re,im)*sin((random(1)*2-1)*2*3.14)':win_size=128:overlap=0.8"
@end example
@end itemize
@anchor{afir}
@ -1860,12 +1882,16 @@ Default value is @var{o}.
@itemize
@item
One of many usages of this filter is noise reduction, input audio is filtered
with same samples that are delayed by fixed ammount, one such example for stereo audio is:
with same samples that are delayed by fixed amount, one such example for stereo audio is:
@example
asplit[a][b],[a]adelay=32S|32S[a],[b][a]anlms=order=128:leakage=0.0005:mu=.5:out_mode=o
@end example
@end itemize
@subsection Commands
This filter supports the same commands as options, excluding option @code{order}.
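For example, building on the noise reduction graph above, the @code{leakage} could be
adjusted 10 seconds into the stream with the @code{asendcmd} filter (the timestamp and
value here are only illustrative):
@example
asendcmd=c='10.0 anlms leakage 0.001',asplit[a][b],[a]adelay=32S|32S[a],[b][a]anlms=order=128:leakage=0.0005:mu=.5:out_mode=o
@end example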
@section anull
Pass the audio source unchanged to the output.
@ -2403,6 +2429,15 @@ atempo=sqrt(3),atempo=sqrt(3)
@end example
@end itemize
@subsection Commands
This filter supports the following commands:
@table @option
@item tempo
Change filter tempo scale factor.
Syntax for the command is: "@var{tempo}" (see the example below).
@end table
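For example, the tempo could be changed mid-stream with the @code{asendcmd} filter
(the timestamp and value are only illustrative):
@example
asendcmd=c='4.0 atempo tempo 1.5',atempo=1.0
@end example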
@section atrim
Trim the input so that the output contains one continuous subpart of the input.
@ -4462,6 +4497,19 @@ Possible values are:
@end table
@end table
@subsection Commands
This filter supports the following commands:
@table @option
@item tempo
Change filter tempo scale factor.
Syntax for the command is: "@var{tempo}" (see the example below).
@item pitch
Change filter pitch scale factor.
Syntax for the command is: "@var{pitch}"
@end table
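Assuming these commands belong to the @code{rubberband} filter (the section heading is
outside this hunk), they could be driven at runtime with @code{asendcmd}; the values are
only illustrative, and the @code{pitch} command works the same way:
@example
asendcmd=c='10.0 rubberband tempo 1.25',rubberband=tempo=1.0:pitch=1.0
@end example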
@section sidechaincompress
This filter acts like normal compressor but has the ability to compress

View File

@ -105,12 +105,14 @@ It takes a single signed native-endian 16-bit raw audio stream of at most 2 chan
@table @option
@item silence_threshold
Threshold for detecting silence, ranges from -1 to 32767. -1 disables silence detection and
is required for use with the AcoustID service. Default is -1.
Threshold for detecting silence. Range is from -1 to 32767, where -1 disables
silence detection. Silence detection can only be used with version 3 of the
algorithm.
Silence detection must be disabled for use with the AcoustID service. Default is -1.
@item algorithm
Version of algorithm to fingerprint with. Range is 0 to 4. Version 2 requires that silence
detection be enabled. Default is 1.
Version of algorithm to fingerprint with. Range is 0 to 4.
Version 3 enables silence detection. Default is 1.
@item fp_format
Format to output the fingerprint as. Accepts the following options:

View File

@ -1821,18 +1821,31 @@ static void print_report(int is_last_report, int64_t timer_start, int64_t cur_ti
//get fps
fps = t > 1 ? frame_number / t : -1;
if (fps <= 0 || max_frames_hint < 0) {
av_bprintf(&buf, "ETA=N/A");
} else {
if (fps > 0 && max_frames_hint > 0) {
//get remaining frames
int64_t remaining_frames = max_frames_hint - frame_number;
secs = remaining_frames / fps;
mins = secs / 60;
secs %= 60;
hours = mins / 60;
mins %= 60;
av_bprintf(&buf, "Remaining=%"PRId64" ETA=%02d:%02d:%02d", remaining_frames, hours, mins, secs);
//calculate finish date
time_t rawtime;
struct tm * timeinfo;
time(&rawtime);
timeinfo = localtime(&rawtime);
time_t time = mktime(timeinfo);
time_t timef = time + secs;
timeinfo = localtime(&timef);
secs %= 60;
char timebuf[64];
if (timef - time > 86400)
strftime(timebuf, 64, "%F %T", timeinfo);
else
strftime(timebuf, 64, "%T", timeinfo);
av_bprintf(&buf, "Remaining=%"PRId64" ETA=%02d:%02d:%02d finish=%s", remaining_frames, hours, mins, secs, timebuf);
}
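The finish-date block above boils down to adding the estimated remaining seconds to the current wall-clock time and formatting the result; a minimal standalone sketch of that idea (names are illustrative, not taken from the patch):

#include <stdio.h>
#include <time.h>

/* Print when an encode would finish, given the estimated remaining seconds. */
static void print_finish_time(long remaining_secs)
{
    time_t now    = time(NULL);
    time_t finish = now + remaining_secs;
    struct tm *tm_finish = localtime(&finish);
    char timebuf[64];                      /* plain char array, not an array of pointers */

    if (remaining_secs > 86400)            /* more than a day away: include the date */
        strftime(timebuf, sizeof(timebuf), "%F %T", tm_finish);
    else
        strftime(timebuf, sizeof(timebuf), "%T", tm_finish);
    printf("finish=%s\n", timebuf);
}

int main(void)
{
    print_finish_time(5000);
    return 0;
}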
if (print_stats || is_last_report) {

View File

@ -56,6 +56,7 @@ static const int8_t probs_code_pred_coeff[3][3] = {
typedef struct ArithCoder {
unsigned int a;
unsigned int c;
int overread;
} ArithCoder;
typedef struct Table {
@ -172,6 +173,7 @@ static void ac_init(ArithCoder *ac, GetBitContext *gb)
{
ac->a = 4095;
ac->c = get_bits(gb, 12);
ac->overread = 0;
}
static av_always_inline void ac_get(ArithCoder *ac, GetBitContext *gb, int p, int *e)
@ -191,6 +193,8 @@ static av_always_inline void ac_get(ArithCoder *ac, GetBitContext *gb, int p, in
if (ac->a < 2048) {
int n = 11 - av_log2(ac->a);
ac->a <<= n;
if (get_bits_left(gb) < n)
ac->overread ++;
ac->c = (ac->c << n) | get_bits(gb, n);
}
}
@ -339,6 +343,9 @@ static int decode_frame(AVCodecContext *avctx, void *data,
prob = 128;
}
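/* bail out once the arithmetic coder has read noticeably past the end of the buffer */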
if (ac->overread > 16)
return AVERROR_INVALIDDATA;
ac_get(ac, gb, prob, &residual);
v = ((predict >> 15) ^ residual) & 1;
dsd[((i >> 3) * channels + ch) << 2] |= v << (7 - (i & 0x7 ));

View File

@ -755,7 +755,6 @@ static int dxv_decompress_cocg(DXVContext *ctx, GetByteContext *gb,
skip0 = dxv_decompress_opcodes(gb, op_data0, op_size0);
if (skip0 < 0)
return skip0;
bytestream2_seek(gb, data_start + op_offset + skip0 - 12, SEEK_SET);
if (op_size1 > max_op_size1)
return AVERROR_INVALIDDATA;
skip1 = dxv_decompress_opcodes(gb, op_data1, op_size1);
@ -784,7 +783,7 @@ static int dxv_decompress_cocg(DXVContext *ctx, GetByteContext *gb,
return ret;
}
bytestream2_seek(gb, data_start + op_offset + skip0 + skip1 - 12, SEEK_SET);
bytestream2_seek(gb, data_start - 12 + op_offset + skip0 + skip1, SEEK_SET);
return 0;
}

View File

@ -1307,6 +1307,7 @@ static int decode_header(EXRContext *s, AVFrame *frame)
int magic_number, version, i, flags, sar = 0;
int layer_match = 0;
int ret;
int dup_channels = 0;
s->current_channel_offset = 0;
s->xmin = ~0;
@ -1465,10 +1466,12 @@ static int decode_header(EXRContext *s, AVFrame *frame)
s->pixel_type = current_pixel_type;
s->channel_offsets[channel_index] = s->current_channel_offset;
} else if (channel_index >= 0) {
av_log(s->avctx, AV_LOG_ERROR,
av_log(s->avctx, AV_LOG_WARNING,
"Multiple channels with index %d.\n", channel_index);
ret = AVERROR_INVALIDDATA;
goto fail;
if (++dup_channels > 10) {
ret = AVERROR_INVALIDDATA;
goto fail;
}
}
s->channels = av_realloc(s->channels,

View File

@ -143,7 +143,7 @@ static int fits_read_header(AVCodecContext *avctx, const uint8_t **ptr, FITSHead
size = abs(header->bitpix) >> 3;
for (i = 0; i < header->naxis; i++) {
if (size && header->naxisn[i] > SIZE_MAX / size) {
if (size == 0 || header->naxisn[i] > SIZE_MAX / size) {
av_log(avctx, AV_LOG_ERROR, "unsupported size of FITS image");
return AVERROR_INVALIDDATA;
}

View File

@ -58,8 +58,9 @@
typedef struct FLACHeaderMarker {
int offset; /**< byte offset from start of FLACParseContext->buffer */
int *link_penalty; /**< pointer to array of local scores between this header
and the one at a distance equal array position */
int link_penalty[FLAC_MAX_SEQUENTIAL_HEADERS]; /**< array of local scores
between this header and the one at a distance equal
array position */
int max_score; /**< maximum score found after checking each child that
has a valid CRC */
FLACFrameInfo fi; /**< decoded frame header info */
@ -112,8 +113,8 @@ static int frame_header_is_valid(AVCodecContext *avctx, const uint8_t *buf,
* This function is based on av_fifo_generic_read, which is why there is a comment
* about a memory barrier for SMP.
*/
static uint8_t* flac_fifo_read_wrap(FLACParseContext *fpc, int offset, int len,
uint8_t** wrap_buf, int* allocated_size)
static uint8_t *flac_fifo_read_wrap(FLACParseContext *fpc, int offset, int len,
uint8_t **wrap_buf, int *allocated_size)
{
AVFifoBuffer *f = fpc->fifo_buf;
uint8_t *start = f->rptr + offset;
@ -152,7 +153,7 @@ static uint8_t* flac_fifo_read_wrap(FLACParseContext *fpc, int offset, int len,
* A second call to flac_fifo_read (with new offset and len) should be called
* to get the post-wrap buf if the returned len is less than the requested.
**/
static uint8_t* flac_fifo_read(FLACParseContext *fpc, int offset, int *len)
static uint8_t *flac_fifo_read(FLACParseContext *fpc, int offset, int *len)
{
AVFifoBuffer *f = fpc->fifo_buf;
uint8_t *start = f->rptr + offset;
@ -188,16 +189,8 @@ static int find_headers_search_validate(FLACParseContext *fpc, int offset)
"couldn't allocate FLACHeaderMarker\n");
return AVERROR(ENOMEM);
}
(*end_handle)->fi = fi;
(*end_handle)->offset = offset;
(*end_handle)->link_penalty = av_malloc(sizeof(int) *
FLAC_MAX_SEQUENTIAL_HEADERS);
if (!(*end_handle)->link_penalty) {
av_freep(end_handle);
av_log(fpc->avctx, AV_LOG_ERROR,
"couldn't allocate link_penalty\n");
return AVERROR(ENOMEM);
}
(*end_handle)->fi = fi;
(*end_handle)->offset = offset;
for (i = 0; i < FLAC_MAX_SEQUENTIAL_HEADERS; i++)
(*end_handle)->link_penalty[i] = FLAC_HEADER_NOT_PENALIZED_YET;
@ -208,9 +201,8 @@ static int find_headers_search_validate(FLACParseContext *fpc, int offset)
return size;
}
static int find_headers_search(FLACParseContext *fpc, uint8_t *buf, int buf_size,
int search_start)
static int find_headers_search(FLACParseContext *fpc, uint8_t *buf,
int buf_size, int search_start)
{
int size = 0, mod_offset = (buf_size - 1) % 4, i, j;
uint32_t x;
@ -221,7 +213,7 @@ static int find_headers_search(FLACParseContext *fpc, uint8_t *buf, int buf_size
}
for (; i < buf_size - 1; i += 4) {
x = AV_RB32(buf + i);
x = AV_RN32(buf + i);
if (((x & ~(x + 0x01010101)) & 0x80808080)) {
for (j = 0; j < 4; j++) {
if ((AV_RB16(buf + i + j) & 0xFFFE) == 0xFFF8)
@ -251,9 +243,9 @@ static int find_new_headers(FLACParseContext *fpc, int search_start)
uint8_t wrap[2];
wrap[0] = buf[read_len - 1];
read_len = search_end - search_start + 1;
/* search_start + 1 is the post-wrap offset in the fifo. */
read_len = search_end - (search_start + 1) + 1;
buf = flac_fifo_read(fpc, search_start + 1, &read_len);
wrap[1] = buf[0];
@ -321,7 +313,7 @@ static int check_header_mismatch(FLACParseContext *fpc,
(child_fi->frame_or_sample_num
!= header_fi->frame_or_sample_num + 1)) {
FLACHeaderMarker *curr;
int expected_frame_num, expected_sample_num;
int64_t expected_frame_num, expected_sample_num;
/* If there are frames in the middle we expect this deduction,
as they are probably valid and this one follows it */
@ -471,7 +463,7 @@ static void score_sequences(FLACParseContext *fpc)
}
}
static int get_best_header(FLACParseContext* fpc, const uint8_t **poutbuf,
static int get_best_header(FLACParseContext *fpc, const uint8_t **poutbuf,
int *poutbuf_size)
{
FLACHeaderMarker *header = fpc->best_header;
@ -497,7 +489,7 @@ static int get_best_header(FLACParseContext* fpc, const uint8_t **poutbuf,
&fpc->wrap_buf_allocated_size);
if (fpc->pc->flags & PARSER_FLAG_USE_CODEC_TS){
if (fpc->pc->flags & PARSER_FLAG_USE_CODEC_TS) {
if (header->fi.is_var_size)
fpc->pc->pts = header->fi.frame_or_sample_num;
else if (header->best_child)
@ -531,7 +523,7 @@ static int flac_parse(AVCodecParserContext *s, AVCodecContext *avctx,
s->duration = fi.blocksize;
if (!avctx->sample_rate)
avctx->sample_rate = fi.samplerate;
if (fpc->pc->flags & PARSER_FLAG_USE_CODEC_TS){
if (fpc->pc->flags & PARSER_FLAG_USE_CODEC_TS) {
fpc->pc->pts = fi.frame_or_sample_num;
if (!fi.is_var_size)
fpc->pc->pts *= fi.blocksize;
@ -559,7 +551,6 @@ static int flac_parse(AVCodecParserContext *s, AVCodecContext *avctx,
curr->max_score, curr->offset, curr->next->offset);
}
temp = curr->next;
av_freep(&curr->link_penalty);
av_free(curr);
fpc->nb_headers_buffered--;
}
@ -570,7 +561,6 @@ static int flac_parse(AVCodecParserContext *s, AVCodecContext *avctx,
for (curr = best_child->next; curr; curr = curr->next)
curr->offset -= best_child->offset;
fpc->nb_headers_buffered--;
best_child->offset = 0;
fpc->headers = best_child;
if (fpc->nb_headers_buffered >= FLAC_MIN_HEADERS) {
@ -584,30 +574,26 @@ static int flac_parse(AVCodecParserContext *s, AVCodecContext *avctx,
for (curr = fpc->headers; curr != fpc->best_header; curr = temp) {
temp = curr->next;
av_freep(&curr->link_penalty);
av_free(curr);
fpc->nb_headers_buffered--;
}
fpc->headers = fpc->best_header->next;
av_freep(&fpc->best_header->link_penalty);
av_freep(&fpc->best_header);
fpc->nb_headers_buffered--;
}
/* Find and score new headers. */
/* buf_size is to zero when padding, so check for this since we do */
/* buf_size is zero when flushing, so check for this since we do */
/* not want to try to read more input once we have found the end. */
/* Note that as (non-modified) parameters, buf can be non-NULL, */
/* while buf_size is 0. */
while ((buf && buf_size && read_end < buf + buf_size &&
/* Also note that buf can't be NULL. */
while ((buf_size && read_end < buf + buf_size &&
fpc->nb_headers_buffered < FLAC_MIN_HEADERS)
|| ((!buf || !buf_size) && !fpc->end_padded)) {
|| (!buf_size && !fpc->end_padded)) {
int start_offset;
/* Pad the end once if EOF, to check the final region for headers. */
if (!buf || !buf_size) {
fpc->end_padded = 1;
buf_size = MAX_FRAME_HEADER_SIZE;
if (!buf_size) {
fpc->end_padded = 1;
read_end = read_start + MAX_FRAME_HEADER_SIZE;
} else {
/* The maximum read size is the upper-bound of what the parser
@ -635,7 +621,7 @@ static int flac_parse(AVCodecParserContext *s, AVCodecContext *avctx,
goto handle_error;
}
if (buf && buf_size) {
if (buf_size) {
av_fifo_generic_write(fpc->fifo_buf, (void*) read_start,
read_end - read_start, NULL);
} else {
@ -658,7 +644,7 @@ static int flac_parse(AVCodecParserContext *s, AVCodecContext *avctx,
fpc->nb_headers_buffered = nb_headers;
/* Wait till FLAC_MIN_HEADERS to output a valid frame. */
if (!fpc->end_padded && fpc->nb_headers_buffered < FLAC_MIN_HEADERS) {
if (buf && read_end < buf + buf_size) {
if (read_end < buf + buf_size) {
read_start = read_end;
continue;
} else {
@ -680,7 +666,6 @@ static int flac_parse(AVCodecParserContext *s, AVCodecContext *avctx,
fpc->fifo_buf->wptr += fpc->fifo_buf->end -
fpc->fifo_buf->buffer;
}
buf_size = 0;
read_start = read_end = NULL;
}
}
@ -693,7 +678,7 @@ static int flac_parse(AVCodecParserContext *s, AVCodecContext *avctx,
if (fpc->best_header && fpc->best_header->max_score <= 0) {
// Only accept a bad header if there is no other option to continue
if (!buf_size || !buf || read_end != buf || fpc->nb_headers_buffered < FLAC_MIN_HEADERS)
if (!buf_size || read_end != buf || fpc->nb_headers_buffered < FLAC_MIN_HEADERS)
fpc->best_header = NULL;
}
@ -705,13 +690,13 @@ static int flac_parse(AVCodecParserContext *s, AVCodecContext *avctx,
fpc->best_header->offset);
/* Set duration to 0. It is unknown or invalid in a junk frame. */
s->duration = 0;
*poutbuf_size = fpc->best_header->offset;
*poutbuf = flac_fifo_read_wrap(fpc, 0, *poutbuf_size,
&fpc->wrap_buf,
&fpc->wrap_buf_allocated_size);
s->duration = 0;
*poutbuf_size = fpc->best_header->offset;
*poutbuf = flac_fifo_read_wrap(fpc, 0, *poutbuf_size,
&fpc->wrap_buf,
&fpc->wrap_buf_allocated_size);
return buf_size ? (read_end - buf) : (fpc->best_header->offset -
av_fifo_size(fpc->fifo_buf));
av_fifo_size(fpc->fifo_buf));
}
if (!buf_size)
return get_best_header(fpc, poutbuf, poutbuf_size);
@ -745,10 +730,10 @@ static void flac_parse_close(AVCodecParserContext *c)
while (curr) {
temp = curr->next;
av_freep(&curr->link_penalty);
av_free(curr);
curr = temp;
}
fpc->headers = NULL;
av_fifo_freep(&fpc->fifo_buf);
av_freep(&fpc->wrap_buf);
}

View File

@ -1218,6 +1218,11 @@ int ff_h263_decode_picture_header(MpegEncContext *s)
if ((ret = av_image_check_size(s->width, s->height, 0, s)) < 0)
return ret;
if (!(s->avctx->flags2 & AV_CODEC_FLAG2_CHUNKS)) {
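/* heuristic sanity check (assumption): require at least one bit of coded data per 8 macroblocks (width*height/256 of them) before accepting the header */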
if ((s->width * s->height / 256 / 8) > get_bits_left(&s->gb))
return AVERROR_INVALIDDATA;
}
s->mb_width = (s->width + 15) / 16;
s->mb_height = (s->height + 15) / 16;
s->mb_num = s->mb_width * s->mb_height;

View File

@ -222,7 +222,7 @@ static inline int ls_get_code_runterm(GetBitContext *gb, JLSState *state,
/**
* Decode one line of image
*/
static inline void ls_decode_line(JLSState *state, MJpegDecodeContext *s,
static inline int ls_decode_line(JLSState *state, MJpegDecodeContext *s,
void *last, void *dst, int last2, int w,
int stride, int comp, int bits)
{
@ -234,7 +234,7 @@ static inline void ls_decode_line(JLSState *state, MJpegDecodeContext *s,
int err, pred;
if (get_bits_left(&s->gb) <= 0)
return;
return AVERROR_INVALIDDATA;
/* compute gradients */
Ra = x ? R(dst, x - stride) : R(last, x);
@ -263,11 +263,11 @@ static inline void ls_decode_line(JLSState *state, MJpegDecodeContext *s,
}
/* if EOL reached, we stop decoding */
if (r != 1 << ff_log2_run[state->run_index[comp]])
return;
return 0;
if (state->run_index[comp] < 31)
state->run_index[comp]++;
if (x + stride > w)
return;
return 0;
}
/* decode aborted run */
r = ff_log2_run[state->run_index[comp]];
@ -284,7 +284,7 @@ static inline void ls_decode_line(JLSState *state, MJpegDecodeContext *s,
if (x >= w) {
av_log(NULL, AV_LOG_ERROR, "run overflow\n");
av_assert0(x <= w);
return;
return AVERROR_INVALIDDATA;
}
/* decode run termination value */
@ -341,6 +341,8 @@ static inline void ls_decode_line(JLSState *state, MJpegDecodeContext *s,
W(dst, x, pred);
x += stride;
}
return 0;
}
int ff_jpegls_decode_picture(MJpegDecodeContext *s, int near,
@ -350,6 +352,7 @@ int ff_jpegls_decode_picture(MJpegDecodeContext *s, int near,
uint8_t *zero, *last, *cur;
JLSState *state;
int off = 0, stride = 1, width, shift, ret = 0;
int decoded_height = 0;
zero = av_mallocz(s->picture_ptr->linesize[0]);
if (!zero)
@ -407,13 +410,16 @@ int ff_jpegls_decode_picture(MJpegDecodeContext *s, int near,
width = s->width * stride;
cur += off;
for (i = 0; i < s->height; i++) {
int ret;
if (s->bits <= 8) {
ls_decode_line(state, s, last, cur, t, width, stride, off, 8);
ret = ls_decode_line(state, s, last, cur, t, width, stride, off, 8);
t = last[0];
} else {
ls_decode_line(state, s, last, cur, t, width, stride, off, 16);
ret = ls_decode_line(state, s, last, cur, t, width, stride, off, 16);
t = *((uint16_t *)last);
}
if (ret < 0)
break;
last = cur;
cur += s->picture_ptr->linesize[0];
@ -422,6 +428,7 @@ int ff_jpegls_decode_picture(MJpegDecodeContext *s, int near,
skip_bits(&s->gb, 16); /* skip RSTn */
}
}
decoded_height = i;
} else if (ilv == 1) { /* line interleaving */
int j;
int Rc[3] = { 0, 0, 0 };
@ -429,9 +436,12 @@ int ff_jpegls_decode_picture(MJpegDecodeContext *s, int near,
memset(cur, 0, s->picture_ptr->linesize[0]);
width = s->width * stride;
for (i = 0; i < s->height; i++) {
int ret;
for (j = 0; j < stride; j++) {
ls_decode_line(state, s, last + j, cur + j,
ret = ls_decode_line(state, s, last + j, cur + j,
Rc[j], width, stride, j, 8);
if (ret < 0)
break;
Rc[j] = last[j];
if (s->restart_interval && !--s->restart_count) {
@ -439,9 +449,12 @@ int ff_jpegls_decode_picture(MJpegDecodeContext *s, int near,
skip_bits(&s->gb, 16); /* skip RSTn */
}
}
if (ret < 0)
break;
last = cur;
cur += s->picture_ptr->linesize[0];
}
decoded_height = i;
} else if (ilv == 2) { /* sample interleaving */
avpriv_report_missing_feature(s->avctx, "Sample interleaved images");
ret = AVERROR_PATCHWELCOME;
@ -507,7 +520,7 @@ int ff_jpegls_decode_picture(MJpegDecodeContext *s, int near,
if (s->bits <= 8) {
uint8_t *src = s->picture_ptr->data[0];
for (i = 0; i < s->height; i++) {
for (i = 0; i < decoded_height; i++) {
for (x = off; x < w; x += stride)
src[x] <<= shift;
src += s->picture_ptr->linesize[0];
@ -515,7 +528,7 @@ int ff_jpegls_decode_picture(MJpegDecodeContext *s, int near,
} else {
uint16_t *src = (uint16_t *)s->picture_ptr->data[0];
for (i = 0; i < s->height; i++) {
for (i = 0; i < decoded_height; i++) {
for (x = 0; x < w; x++)
src[x] <<= shift;
src += s->picture_ptr->linesize[0] / 2;

View File

@ -515,6 +515,66 @@ static void set_color_range(AVCodecContext *avctx)
#endif
#endif
/**
* Set the target bitrate to VPX library default. Also set CRF to 32 if needed.
*/
static void set_vp8_defaults(AVCodecContext *avctx,
struct vpx_codec_enc_cfg *enccfg)
{
VPxContext *ctx = avctx->priv_data;
av_assert0(!avctx->bit_rate);
avctx->bit_rate = enccfg->rc_target_bitrate * 1000;
if (enccfg->rc_end_usage == VPX_CQ) {
av_log(avctx, AV_LOG_WARNING,
"Bitrate not specified for constrained quality mode, using default of %dkbit/sec\n",
enccfg->rc_target_bitrate);
} else {
enccfg->rc_end_usage = VPX_CQ;
ctx->crf = 32;
av_log(avctx, AV_LOG_WARNING,
"Neither bitrate nor constrained quality specified, using default CRF of %d and bitrate of %dkbit/sec\n",
ctx->crf, enccfg->rc_target_bitrate);
}
}
#if CONFIG_LIBVPX_VP9_ENCODER
/**
* Keep the target bitrate at 0 to engage constant quality mode. If CRF is not
* set, use 32.
*/
static void set_vp9_defaults(AVCodecContext *avctx,
struct vpx_codec_enc_cfg *enccfg)
{
VPxContext *ctx = avctx->priv_data;
av_assert0(!avctx->bit_rate);
if (enccfg->rc_end_usage != VPX_Q && ctx->lossless < 0) {
enccfg->rc_end_usage = VPX_Q;
ctx->crf = 32;
av_log(avctx, AV_LOG_WARNING,
"Neither bitrate nor constrained quality specified, using default CRF of %d\n",
ctx->crf);
}
}
#endif
/**
* Called when the bitrate is not set. It sets appropriate default values for
* bitrate and CRF.
*/
static void set_vpx_defaults(AVCodecContext *avctx,
struct vpx_codec_enc_cfg *enccfg)
{
av_assert0(!avctx->bit_rate);
#if CONFIG_LIBVPX_VP9_ENCODER
if (avctx->codec_id == AV_CODEC_ID_VP9) {
set_vp9_defaults(avctx, enccfg);
return;
}
#endif
set_vp8_defaults(avctx, enccfg);
}
static av_cold int vpx_init(AVCodecContext *avctx,
const struct vpx_codec_iface *iface)
{
@ -585,18 +645,9 @@ static av_cold int vpx_init(AVCodecContext *avctx,
if (avctx->bit_rate) {
enccfg.rc_target_bitrate = av_rescale_rnd(avctx->bit_rate, 1, 1000,
AV_ROUND_NEAR_INF);
#if CONFIG_LIBVPX_VP9_ENCODER
} else if (enccfg.rc_end_usage == VPX_Q) {
#endif
} else {
if (enccfg.rc_end_usage == VPX_CQ) {
enccfg.rc_target_bitrate = 1000000;
} else {
avctx->bit_rate = enccfg.rc_target_bitrate * 1000;
av_log(avctx, AV_LOG_WARNING,
"Neither bitrate nor constrained quality specified, using default bitrate of %dkbit/sec\n",
enccfg.rc_target_bitrate);
}
// Set bitrate to default value. Also sets CRF to default if needed.
set_vpx_defaults(avctx, &enccfg);
}
if (avctx->codec_id == AV_CODEC_ID_VP9 && ctx->lossless == 1) {
@ -1459,6 +1510,7 @@ static const AVOption vp9_options[] = {
#undef LEGACY_OPTIONS
static const AVCodecDefault defaults[] = {
{ "b", "0" },
{ "qmin", "-1" },
{ "qmax", "-1" },
{ "g", "-1" },

View File

@ -547,10 +547,7 @@ static int magy_decode_frame(AVCodecContext *avctx, void *data,
s->hshift[2] =
s->vshift[2] = 0;
s->decorrelate = 0;
s->max = 256;
s->bps = 8;
s->huff_build = huff_build;
s->magy_decode_slice = magy_decode_slice;
format = bytestream2_get_byte(&gbyte);
switch (format) {
@ -587,61 +584,46 @@ static int magy_decode_frame(AVCodecContext *avctx, void *data,
avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
s->hshift[1] =
s->hshift[2] = 1;
s->max = 1024;
s->huff_build = huff_build10;
s->magy_decode_slice = magy_decode_slice10;
s->bps = 10;
break;
case 0x76:
avctx->pix_fmt = AV_PIX_FMT_YUV444P10;
s->max = 1024;
s->huff_build = huff_build10;
s->magy_decode_slice = magy_decode_slice10;
s->bps = 10;
break;
case 0x6d:
avctx->pix_fmt = AV_PIX_FMT_GBRP10;
s->decorrelate = 1;
s->max = 1024;
s->huff_build = huff_build10;
s->magy_decode_slice = magy_decode_slice10;
s->bps = 10;
break;
case 0x6e:
avctx->pix_fmt = AV_PIX_FMT_GBRAP10;
s->decorrelate = 1;
s->max = 1024;
s->huff_build = huff_build10;
s->magy_decode_slice = magy_decode_slice10;
s->bps = 10;
break;
case 0x6f:
avctx->pix_fmt = AV_PIX_FMT_GBRP12;
s->decorrelate = 1;
s->max = 4096;
s->huff_build = huff_build12;
s->magy_decode_slice = magy_decode_slice10;
s->bps = 12;
break;
case 0x70:
avctx->pix_fmt = AV_PIX_FMT_GBRAP12;
s->decorrelate = 1;
s->max = 4096;
s->huff_build = huff_build12;
s->magy_decode_slice = magy_decode_slice10;
s->bps = 12;
break;
case 0x73:
avctx->pix_fmt = AV_PIX_FMT_GRAY10;
s->max = 1024;
s->huff_build = huff_build10;
s->magy_decode_slice = magy_decode_slice10;
s->bps = 10;
break;
default:
avpriv_request_sample(avctx, "Format 0x%X", format);
return AVERROR_PATCHWELCOME;
}
s->max = 1 << s->bps;
s->magy_decode_slice = s->bps == 8 ? magy_decode_slice : magy_decode_slice10;
if ( s->bps == 8)
s->huff_build = huff_build;
else
s->huff_build = s->bps == 10 ? huff_build10 : huff_build12;
s->planes = av_pix_fmt_count_planes(avctx->pix_fmt);
bytestream2_skip(&gbyte, 1);

View File

@ -165,7 +165,7 @@ void ff_convert_matrix(MpegEncContext *s, int (*qmat)[64],
}
}
if (shift) {
av_log(NULL, AV_LOG_INFO,
av_log(s->avctx, AV_LOG_INFO,
"Warning, QMAT_SHIFT is larger than %d, overflows possible\n",
QMAT_SHIFT - shift);
}

View File

@ -264,6 +264,9 @@ static av_cold int pcm_decode_init(AVCodecContext *avctx)
break;
case AV_CODEC_ID_PCM_F16LE:
case AV_CODEC_ID_PCM_F24LE:
if (avctx->bits_per_coded_sample < 1 || avctx->bits_per_coded_sample > 24)
return AVERROR_INVALIDDATA;
s->scale = 1. / (1 << (avctx->bits_per_coded_sample - 1));
s->fdsp = avpriv_float_dsp_alloc(0);
if (!s->fdsp)

View File

@ -424,7 +424,7 @@ static int png_decode_idat(PNGDecContext *s, int length)
s->zstream.next_out = s->crow_buf;
}
if (ret == Z_STREAM_END && s->zstream.avail_in > 0) {
av_log(NULL, AV_LOG_WARNING,
av_log(s->avctx, AV_LOG_WARNING,
"%d undecompressed bytes left in buffer\n", s->zstream.avail_in);
return 0;
}

View File

@ -412,15 +412,21 @@ static int ff_qsv_set_display_handle(AVCodecContext *avctx, QSVSession *qs)
#endif //AVCODEC_QSV_LINUX_SESSION_HANDLE
int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs,
const char *load_plugins)
const char *load_plugins, int gpu_copy)
{
mfxIMPL impl = MFX_IMPL_AUTO_ANY;
mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
mfxIMPL impl = MFX_IMPL_AUTO_ANY;
mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
const char *desc;
int ret;
ret = MFXInit(impl, &ver, &qs->session);
#if QSV_VERSION_ATLEAST(1, 16)
init_par.GPUCopy = gpu_copy;
#endif
init_par.Implementation = impl;
init_par.Version = ver;
ret = MFXInitEx(init_par, &qs->session);
if (ret < 0)
return ff_qsv_print_error(avctx, ret,
"Error initializing an internal MFX session");
@ -712,7 +718,8 @@ static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
}
int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
AVBufferRef *device_ref, const char *load_plugins)
AVBufferRef *device_ref, const char *load_plugins,
int gpu_copy)
{
static const mfxHandleType handle_types[] = {
MFX_HANDLE_VA_DISPLAY,
@ -722,11 +729,12 @@ int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)device_ref->data;
AVQSVDeviceContext *device_hwctx = device_ctx->hwctx;
mfxSession parent_session = device_hwctx->session;
mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
mfxHDL handle = NULL;
mfxSession session;
mfxVersion ver;
mfxIMPL impl;
mfxHDL handle = NULL;
mfxHandleType handle_type;
mfxStatus err;
@ -752,7 +760,12 @@ int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
"from the session\n");
}
err = MFXInit(impl, &ver, &session);
#if QSV_VERSION_ATLEAST(1, 16)
init_par.GPUCopy = gpu_copy;
#endif
init_par.Implementation = impl;
init_par.Version = ver;
err = MFXInitEx(init_par, &session);
if (err != MFX_ERR_NONE)
return ff_qsv_print_error(avctx, err,
"Error initializing a child MFX session");
@ -783,7 +796,7 @@ int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
int ff_qsv_init_session_frames(AVCodecContext *avctx, mfxSession *psession,
QSVFramesContext *qsv_frames_ctx,
const char *load_plugins, int opaque)
const char *load_plugins, int opaque, int gpu_copy)
{
mfxFrameAllocator frame_allocator = {
.pthis = qsv_frames_ctx,
@ -803,7 +816,7 @@ int ff_qsv_init_session_frames(AVCodecContext *avctx, mfxSession *psession,
int ret;
ret = ff_qsv_init_session_device(avctx, &session,
frames_ctx->device_ref, load_plugins);
frames_ctx->device_ref, load_plugins, gpu_copy);
if (ret < 0)
return ret;
@ -835,9 +848,7 @@ int ff_qsv_close_internal_session(QSVSession *qs)
qs->session = NULL;
}
#ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
if (qs->va_device_ctx) {
qs->va_device_ctx->free(qs->va_device_ctx);
}
av_buffer_unref(&qs->va_device_ref);
#endif
return 0;
}

View File

@ -127,16 +127,17 @@ enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type);
enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct);
int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs,
const char *load_plugins);
const char *load_plugins, int gpu_copy);
int ff_qsv_close_internal_session(QSVSession *qs);
int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
AVBufferRef *device_ref, const char *load_plugins);
AVBufferRef *device_ref, const char *load_plugins,
int gpu_copy);
int ff_qsv_init_session_frames(AVCodecContext *avctx, mfxSession *session,
QSVFramesContext *qsv_frames_ctx,
const char *load_plugins, int opaque);
const char *load_plugins, int opaque, int gpu_copy);
int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame);

View File

@ -34,9 +34,11 @@
#include "libavutil/pixdesc.h"
#include "libavutil/pixfmt.h"
#include "libavutil/time.h"
#include "libavutil/imgutils.h"
#include "avcodec.h"
#include "internal.h"
#include "decode.h"
#include "qsv.h"
#include "qsv_internal.h"
#include "qsvdec.h"
@ -54,11 +56,52 @@ const AVCodecHWConfigInternal *ff_qsv_hw_configs[] = {
NULL
};
static int ff_qsv_get_continuous_buffer(AVCodecContext *avctx, AVFrame *frame, AVBufferPool *pool)
{
int ret = 0;
ff_decode_frame_props(avctx, frame);
frame->width = avctx->width;
frame->height = avctx->height;
switch (avctx->pix_fmt) {
case AV_PIX_FMT_NV12:
frame->linesize[0] = FFALIGN(avctx->width, 128);
break;
case AV_PIX_FMT_P010:
frame->linesize[0] = 2 * FFALIGN(avctx->width, 128);
break;
default:
av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format.\n");
return AVERROR(ENOMEM);
}
frame->linesize[1] = frame->linesize[0];
frame->buf[0] = av_buffer_pool_get(pool);
if (!frame->buf[0])
return AVERROR(ENOMEM);
frame->data[0] = frame->buf[0]->data;
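/* the chroma plane follows the luma plane; the pool buffer was allocated with the height aligned to 64, matching this offset */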
frame->data[1] = frame->data[0] +
frame->linesize[0] * FFALIGN(avctx->height, 64);
ret = ff_attach_decode_data(frame);
if (ret < 0)
return ret;
return 0;
}
static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession session,
AVBufferRef *hw_frames_ref, AVBufferRef *hw_device_ref)
{
int ret;
if (q->gpu_copy == MFX_GPUCOPY_ON &&
!(q->iopattern & MFX_IOPATTERN_OUT_SYSTEM_MEMORY))
av_log(avctx, AV_LOG_WARNING, "GPU-accelerated memory copy "
"only works in MFX_IOPATTERN_OUT_SYSTEM_MEMORY.\n");
if (session) {
q->session = session;
} else if (hw_frames_ref) {
@ -74,7 +117,8 @@ static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession ses
ret = ff_qsv_init_session_frames(avctx, &q->internal_qs.session,
&q->frames_ctx, q->load_plugins,
q->iopattern == MFX_IOPATTERN_OUT_OPAQUE_MEMORY);
q->iopattern == MFX_IOPATTERN_OUT_OPAQUE_MEMORY,
q->gpu_copy);
if (ret < 0) {
av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
return ret;
@ -88,7 +132,7 @@ static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession ses
}
ret = ff_qsv_init_session_device(avctx, &q->internal_qs.session,
hw_device_ref, q->load_plugins);
hw_device_ref, q->load_plugins, q->gpu_copy);
if (ret < 0)
return ret;
@ -96,7 +140,7 @@ static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession ses
} else {
if (!q->internal_qs.session) {
ret = ff_qsv_init_internal_session(avctx, &q->internal_qs,
q->load_plugins);
q->load_plugins, q->gpu_copy);
if (ret < 0)
return ret;
}
@ -229,6 +273,9 @@ static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q, mfxVideoParam *
q->frame_info = param->mfx.FrameInfo;
if (!avctx->hw_frames_ctx)
q->pool = av_buffer_pool_init(av_image_get_buffer_size(avctx->pix_fmt,
FFALIGN(avctx->width, 128), FFALIGN(avctx->height, 64), 1), av_buffer_allocz);
return 0;
}
@ -275,7 +322,11 @@ static int alloc_frame(AVCodecContext *avctx, QSVContext *q, QSVFrame *frame)
{
int ret;
ret = ff_get_buffer(avctx, frame->frame, AV_GET_BUFFER_FLAG_REF);
if (q->pool)
ret = ff_qsv_get_continuous_buffer(avctx, frame->frame, q->pool);
else
ret = ff_get_buffer(avctx, frame->frame, AV_GET_BUFFER_FLAG_REF);
if (ret < 0)
return ret;
@ -535,6 +586,7 @@ int ff_qsv_decode_close(QSVContext *q)
av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
av_buffer_unref(&q->frames_ctx.mids_buf);
av_buffer_pool_uninit(&q->pool);
return 0;
}

View File

@ -59,12 +59,14 @@ typedef struct QSVContext {
enum AVPixelFormat orig_pix_fmt;
uint32_t fourcc;
mfxFrameInfo frame_info;
AVBufferPool *pool;
int initialized;
// options set by the caller
int async_depth;
int iopattern;
int gpu_copy;
char *load_plugins;

View File

@ -193,6 +193,11 @@ static const AVOption hevc_options[] = {
{ "load_plugins", "A :-separate list of hexadecimal plugin UIDs to load in an internal session",
OFFSET(qsv.load_plugins), AV_OPT_TYPE_STRING, { .str = "" }, 0, 0, VD },
{ "gpu_copy", "A GPU-accelerated copy between video and system memory", OFFSET(qsv.gpu_copy), AV_OPT_TYPE_INT, { .i64 = MFX_GPUCOPY_DEFAULT }, MFX_GPUCOPY_DEFAULT, MFX_GPUCOPY_OFF, VD, "gpu_copy"},
{ "default", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MFX_GPUCOPY_DEFAULT }, 0, 0, VD, "gpu_copy"},
{ "on", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MFX_GPUCOPY_ON }, 0, 0, VD, "gpu_copy"},
{ "off", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MFX_GPUCOPY_OFF }, 0, 0, VD, "gpu_copy"},
{ NULL },
};
@ -228,6 +233,11 @@ AVCodec ff_hevc_qsv_decoder = {
#if CONFIG_H264_QSV_DECODER
static const AVOption options[] = {
{ "async_depth", "Internal parallelization depth, the higher the value the higher the latency.", OFFSET(qsv.async_depth), AV_OPT_TYPE_INT, { .i64 = ASYNC_DEPTH_DEFAULT }, 1, INT_MAX, VD },
{ "gpu_copy", "A GPU-accelerated copy between video and system memory", OFFSET(qsv.gpu_copy), AV_OPT_TYPE_INT, { .i64 = MFX_GPUCOPY_DEFAULT }, MFX_GPUCOPY_DEFAULT, MFX_GPUCOPY_OFF, VD, "gpu_copy"},
{ "default", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MFX_GPUCOPY_DEFAULT }, 0, 0, VD, "gpu_copy"},
{ "on", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MFX_GPUCOPY_ON }, 0, 0, VD, "gpu_copy"},
{ "off", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MFX_GPUCOPY_OFF }, 0, 0, VD, "gpu_copy"},
{ NULL },
};

View File

@ -181,6 +181,11 @@ static void qsv_decode_flush(AVCodecContext *avctx)
#define VD AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM
static const AVOption options[] = {
{ "async_depth", "Internal parallelization depth, the higher the value the higher the latency.", OFFSET(qsv.async_depth), AV_OPT_TYPE_INT, { .i64 = ASYNC_DEPTH_DEFAULT }, 1, INT_MAX, VD },
{ "gpu_copy", "A GPU-accelerated copy between video and system memory", OFFSET(qsv.gpu_copy), AV_OPT_TYPE_INT, { .i64 = MFX_GPUCOPY_DEFAULT }, MFX_GPUCOPY_DEFAULT, MFX_GPUCOPY_OFF, VD, "gpu_copy"},
{ "default", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MFX_GPUCOPY_DEFAULT }, 0, 0, VD, "gpu_copy"},
{ "on", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MFX_GPUCOPY_ON }, 0, 0, VD, "gpu_copy"},
{ "off", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MFX_GPUCOPY_OFF }, 0, 0, VD, "gpu_copy"},
{ NULL },
};

View File

@ -956,7 +956,8 @@ static int qsvenc_init_session(AVCodecContext *avctx, QSVEncContext *q)
ret = ff_qsv_init_session_frames(avctx, &q->internal_qs.session,
&q->frames_ctx, q->load_plugins,
q->param.IOPattern == MFX_IOPATTERN_IN_OPAQUE_MEMORY);
q->param.IOPattern == MFX_IOPATTERN_IN_OPAQUE_MEMORY,
MFX_GPUCOPY_OFF);
if (ret < 0) {
av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
return ret;
@ -965,14 +966,15 @@ static int qsvenc_init_session(AVCodecContext *avctx, QSVEncContext *q)
q->session = q->internal_qs.session;
} else if (avctx->hw_device_ctx) {
ret = ff_qsv_init_session_device(avctx, &q->internal_qs.session,
avctx->hw_device_ctx, q->load_plugins);
avctx->hw_device_ctx, q->load_plugins,
MFX_GPUCOPY_OFF);
if (ret < 0)
return ret;
q->session = q->internal_qs.session;
} else {
ret = ff_qsv_init_internal_session(avctx, &q->internal_qs,
q->load_plugins);
q->load_plugins, MFX_GPUCOPY_OFF);
if (ret < 0)
return ret;

View File

@ -534,6 +534,9 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
uint32_t clr, *dst = (uint32_t *)s->current_frame->data[0];
int y;
if (bytestream2_get_bytes_left(gb) < 3)
return AVERROR_INVALIDDATA;
frame->key_frame = 1;
bytestream2_skip(gb, 1);
if (avctx->bits_per_coded_sample == 16) {

View File

@ -100,14 +100,18 @@ static int sunrast_decode_frame(AVCodecContext *avctx, void *data,
if (ret < 0)
return ret;
/* scanlines are aligned on 16 bit boundaries */
len = (depth * w + 7) >> 3;
alen = len + (len & 1);
if (buf_end - buf < maplength + (len * h) * 3 / 256)
return AVERROR_INVALIDDATA;
if ((ret = ff_get_buffer(avctx, p, 0)) < 0)
return ret;
p->pict_type = AV_PICTURE_TYPE_I;
if (buf_end - buf < maplength)
return AVERROR_INVALIDDATA;
if (depth > 8 && maplength) {
av_log(avctx, AV_LOG_WARNING, "useless colormap found or file is corrupted, trying to recover\n");
@ -136,10 +140,6 @@ static int sunrast_decode_frame(AVCodecContext *avctx, void *data,
stride = p->linesize[0];
}
/* scanlines are aligned on 16 bit boundaries */
len = (depth * w + 7) >> 3;
alen = len + (len & 1);
if (type == RT_BYTE_ENCODED) {
int value, run;
uint8_t *end = ptr + h * stride;

View File

@ -444,6 +444,8 @@ static int truemotion1_decode_header(TrueMotion1Context *s)
if (s->flags & FLAG_KEYFRAME) {
/* no change bits specified for a keyframe; only index bytes */
s->index_stream = s->mb_change_bits;
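/* heuristic (assumption): the keyframe index stream needs space roughly proportional to the frame area, so reject packets too small to hold it */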
if (s->avctx->width * s->avctx->height / 2048 + header.header_size > s->size)
return AVERROR_INVALIDDATA;
} else {
/* one change bit per 4x4 block */
s->index_stream = s->mb_change_bits +

View File

@ -256,7 +256,7 @@ static const AVOption options[] = {
.capabilities = AV_CODEC_CAP_HARDWARE | AV_CODEC_CAP_DELAY | AV_CODEC_CAP_AVOID_PROBING, \
.caps_internal = FF_CODEC_CAP_SETS_PKT_DTS, \
.wrapper_name = "v4l2m2m", \
};
}
M2MDEC(h264, "H.264", AV_CODEC_ID_H264, "h264_mp4toannexb");
M2MDEC(hevc, "HEVC", AV_CODEC_ID_HEVC, "hevc_mp4toannexb");

View File

@ -29,7 +29,7 @@
#define LIBAVCODEC_VERSION_MAJOR 58
#define LIBAVCODEC_VERSION_MINOR 59
#define LIBAVCODEC_VERSION_MICRO 101
#define LIBAVCODEC_VERSION_MICRO 102
#define LIBAVCODEC_VERSION_INT AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, \
LIBAVCODEC_VERSION_MINOR, \

View File

@ -617,7 +617,7 @@ static void videotoolbox_decoder_callback(void *opaque,
}
if (!image_buffer) {
av_log(NULL, AV_LOG_DEBUG, "vt decoder cb: output image buffer is null\n");
av_log(avctx, AV_LOG_DEBUG, "vt decoder cb: output image buffer is null\n");
return;
}

View File

@ -1888,12 +1888,14 @@ static av_cold int xma_decode_init(AVCodecContext *avctx)
s->num_streams = avctx->extradata[1];
if (avctx->extradata_size != (32 + ((avctx->extradata[0]==3)?0:8) + 4*s->num_streams)) {
av_log(avctx, AV_LOG_ERROR, "Incorrect XMA2 extradata size\n");
s->num_streams = 0;
return AVERROR(EINVAL);
}
} else if (avctx->codec_id == AV_CODEC_ID_XMA1 && avctx->extradata_size >= 4) { /* XMAWAVEFORMAT */
s->num_streams = avctx->extradata[4];
if (avctx->extradata_size != (8 + 20*s->num_streams)) {
av_log(avctx, AV_LOG_ERROR, "Incorrect XMA1 extradata size\n");
s->num_streams = 0;
return AVERROR(EINVAL);
}
} else {
@ -1906,6 +1908,7 @@ static av_cold int xma_decode_init(AVCodecContext *avctx)
s->num_streams <= 0
) {
avpriv_request_sample(avctx, "More than %d channels in %d streams", XMA_MAX_CHANNELS, s->num_streams);
s->num_streams = 0;
return AVERROR_PATCHWELCOME;
}
@ -1938,6 +1941,7 @@ static av_cold int xma_decode_end(AVCodecContext *avctx)
decode_end(&s->xma[i]);
av_frame_free(&s->frames[i]);
}
s->num_streams = 0;
return 0;
}
@ -1993,6 +1997,7 @@ AVCodec ff_wmapro_decoder = {
.close = wmapro_decode_end,
.decode = wmapro_decode_packet,
.capabilities = AV_CODEC_CAP_SUBFRAMES | AV_CODEC_CAP_DR1,
.caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
.flush = wmapro_flush,
.sample_fmts = (const enum AVSampleFormat[]) { AV_SAMPLE_FMT_FLTP,
AV_SAMPLE_FMT_NONE },
@ -2008,6 +2013,7 @@ AVCodec ff_xma1_decoder = {
.close = xma_decode_end,
.decode = xma_decode_packet,
.capabilities = AV_CODEC_CAP_SUBFRAMES | AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
.caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
.sample_fmts = (const enum AVSampleFormat[]) { AV_SAMPLE_FMT_FLTP,
AV_SAMPLE_FMT_NONE },
};
@ -2023,6 +2029,7 @@ AVCodec ff_xma2_decoder = {
.decode = xma_decode_packet,
.flush = xma_flush,
.capabilities = AV_CODEC_CAP_SUBFRAMES | AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
.caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
.sample_fmts = (const enum AVSampleFormat[]) { AV_SAMPLE_FMT_FLTP,
AV_SAMPLE_FMT_NONE },
};

View File

@ -36,6 +36,7 @@ typedef struct ChanDelay {
typedef struct AudioDelayContext {
const AVClass *class;
int all;
char *delays;
ChanDelay *chandelay;
int nb_delays;
@ -54,6 +55,7 @@ typedef struct AudioDelayContext {
static const AVOption adelay_options[] = {
{ "delays", "set list of delays for each channel", OFFSET(delays), AV_OPT_TYPE_STRING, {.str=NULL}, 0, 0, A },
{ "all", "use last available delay for remained channels", OFFSET(all), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, A },
{ NULL }
};
@ -163,6 +165,11 @@ static int config_input(AVFilterLink *inlink)
}
}
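/* 'all' option: reuse the last parsed delay for every remaining channel */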
if (s->all && i) {
for (int j = i; j < s->nb_delays; j++)
s->chandelay[j].delay = s->chandelay[i-1].delay;
}
s->padding = s->chandelay[0].delay;
for (i = 1; i < s->nb_delays; i++) {
ChanDelay *d = &s->chandelay[i];

View File

@ -63,7 +63,7 @@ typedef struct AudioNLMSContext {
static const AVOption anlms_options[] = {
{ "order", "set the filter order", OFFSET(order), AV_OPT_TYPE_INT, {.i64=256}, 1, INT16_MAX, A },
{ "mu", "set the filter mu", OFFSET(mu), AV_OPT_TYPE_FLOAT, {.dbl=0.75}, 0, 1, A },
{ "mu", "set the filter mu", OFFSET(mu), AV_OPT_TYPE_FLOAT, {.dbl=0.75}, 0, 2, A },
{ "eps", "set the filter eps", OFFSET(eps), AV_OPT_TYPE_FLOAT, {.dbl=1}, 0, 1, A },
{ "leakage", "set the filter leakage", OFFSET(leakage), AV_OPT_TYPE_FLOAT, {.dbl=0}, 0, 1, A },
{ "out_mode", "set output mode", OFFSET(output_mode), AV_OPT_TYPE_INT, {.i64=OUT_MODE}, 0, NB_OMODES-1, A, "mode" },
@ -281,6 +281,22 @@ static av_cold int init(AVFilterContext *ctx)
return 0;
}
static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
char *res, int res_len, int flags)
{
AudioNLMSContext *s = ctx->priv;
int ret;
if ( !strcmp(cmd, "mu") || !strcmp(cmd, "eps")
|| !strcmp(cmd, "leakage") || !strcmp(cmd, "out_mode")) {
ret = av_opt_set(s, cmd, args, 0);
} else {
ret = AVERROR(ENOSYS);
}
return ret;
}
static av_cold void uninit(AVFilterContext *ctx)
{
AudioNLMSContext *s = ctx->priv;
@ -325,4 +341,5 @@ AVFilter ff_af_anlms = {
.inputs = inputs,
.outputs = outputs,
.flags = AVFILTER_FLAG_SLICE_THREADS,
.process_command = process_command,
};

View File

@ -91,7 +91,7 @@ int ff_boxblur_eval_filter_params(AVFilterLink *inlink,
NULL, NULL, NULL, NULL, NULL, 0, ctx); \
comp->radius = res; \
if (ret < 0) { \
av_log(NULL, AV_LOG_ERROR, \
av_log(ctx, AV_LOG_ERROR, \
"Error when evaluating " #comp " radius expression '%s'\n", expr); \
return ret; \
}

View File

@ -317,7 +317,6 @@ do { \
void *oldf = *f; \
\
if (!(*f) && !(*f = av_mallocz(sizeof(**f)))) { \
unref_fn(f); \
return AVERROR(ENOMEM); \
} \
\
@ -456,7 +455,7 @@ do { \
do { \
int idx = -1; \
\
if (!*ref || !(*ref)->refs) \
if (!ref || !*ref || !(*ref)->refs) \
return; \
\
FIND_REF_INDEX(ref, idx); \
@ -518,7 +517,8 @@ void ff_formats_changeref(AVFilterFormats **oldref, AVFilterFormats **newref)
int ret = ref_fn(fmts, &ctx->inputs[i]->out_fmts); \
if (ret < 0) { \
unref_fn(&fmts); \
av_freep(&fmts->list); \
if (fmts) \
av_freep(&fmts->list); \
av_freep(&fmts); \
return ret; \
} \
@ -530,7 +530,8 @@ void ff_formats_changeref(AVFilterFormats **oldref, AVFilterFormats **newref)
int ret = ref_fn(fmts, &ctx->outputs[i]->in_fmts); \
if (ret < 0) { \
unref_fn(&fmts); \
av_freep(&fmts->list); \
if (fmts) \
av_freep(&fmts->list); \
av_freep(&fmts); \
return ret; \
} \

View File

@ -639,11 +639,11 @@ static av_cold void init_blend_func_##depth##_##nbits##bit(FilterParams *param)
case BLEND_XOR: param->blend = blend_xor_##depth##bit; break; \
} \
}
DEFINE_INIT_BLEND_FUNC(8, 8);
DEFINE_INIT_BLEND_FUNC(9, 16);
DEFINE_INIT_BLEND_FUNC(10, 16);
DEFINE_INIT_BLEND_FUNC(12, 16);
DEFINE_INIT_BLEND_FUNC(16, 16);
DEFINE_INIT_BLEND_FUNC(8, 8)
DEFINE_INIT_BLEND_FUNC(9, 16)
DEFINE_INIT_BLEND_FUNC(10, 16)
DEFINE_INIT_BLEND_FUNC(12, 16)
DEFINE_INIT_BLEND_FUNC(16, 16)
void ff_blend_init(FilterParams *param, int depth)
{

View File

@ -331,15 +331,15 @@ static void apply_lut(int16_t *buf[3], ptrdiff_t stride,
}
}
struct ThreadData {
typedef struct ThreadData {
AVFrame *in, *out;
ptrdiff_t in_linesize[3], out_linesize[3];
int in_ss_h, out_ss_h;
};
} ThreadData;
static int convert(AVFilterContext *ctx, void *data, int job_nr, int n_jobs)
{
struct ThreadData *td = data;
const ThreadData *td = data;
ColorSpaceContext *s = ctx->priv;
uint8_t *in_data[3], *out_data[3];
int16_t *rgb[3];
@ -771,7 +771,7 @@ static int filter_frame(AVFilterLink *link, AVFrame *in)
int res;
ptrdiff_t rgb_stride = FFALIGN(in->width * sizeof(int16_t), 32);
unsigned rgb_sz = rgb_stride * in->height;
struct ThreadData td;
ThreadData td;
if (!out) {
av_frame_free(&in);

View File

@ -244,7 +244,7 @@ static int config_input(AVFilterLink *link)
return 0;
fail_expr:
av_log(NULL, AV_LOG_ERROR, "Error when evaluating the expression '%s'\n", expr);
av_log(ctx, AV_LOG_ERROR, "Error when evaluating the expression '%s'\n", expr);
return ret;
}

View File

@ -168,7 +168,7 @@ static void apply_delogo(uint8_t *dst, int dst_linesize,
botleft[x-logo_x1-1] +
botleft[x-logo_x1+1]) * weightb;
weight = (weightl + weightr + weightt + weightb) * 3U;
interp = ROUNDED_DIV(interp, weight);
interp = (interp + (weight >> 1)) / weight;
if (y >= logo_y+band && y < logo_y+logo_h-band &&
x >= logo_x+band && x < logo_x+logo_w-band) {
@ -327,6 +327,21 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in)
s->w = av_expr_eval(s->w_pexpr, s->var_values, s);
s->h = av_expr_eval(s->h_pexpr, s->var_values, s);
if (s->x + (s->band - 1) <= 0 || s->x + s->w - (s->band*2 - 2) > inlink->w ||
s->y + (s->band - 1) <= 0 || s->y + s->h - (s->band*2 - 2) > inlink->h) {
av_log(s, AV_LOG_WARNING, "Logo area is outside of the frame,"
" auto set the area inside of the frame\n");
}
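/* clamp x/y/w/h so the logo area, including the interpolation band, stays inside the frame */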
if (s->x + (s->band - 1) <= 0)
s->x = 1 + s->band;
if (s->y + (s->band - 1) <= 0)
s->y = 1 + s->band;
if (s->x + s->w - (s->band*2 - 2) > inlink->w)
s->w = inlink->w - s->x - (s->band*2 - 2);
if (s->y + s->h - (s->band*2 - 2) > inlink->h)
s->h = inlink->h - s->y - (s->band*2 - 2);
ret = config_input(inlink);
if (ret < 0) {
av_frame_free(&in);

View File

@ -223,7 +223,9 @@ static av_cold void uninit(AVFilterContext *ctx)
av_freep(&s->coefs[1]);
av_freep(&s->coefs[2]);
av_freep(&s->coefs[3]);
av_freep(&s->line);
av_freep(&s->line[0]);
av_freep(&s->line[1]);
av_freep(&s->line[2]);
av_freep(&s->frame_prev[0]);
av_freep(&s->frame_prev[1]);
av_freep(&s->frame_prev[2]);
@ -271,9 +273,11 @@ static int config_input(AVFilterLink *inlink)
s->vsub = desc->log2_chroma_h;
s->depth = desc->comp[0].depth;
s->line = av_malloc_array(inlink->w, sizeof(*s->line));
if (!s->line)
return AVERROR(ENOMEM);
for (i = 0; i < 3; i++) {
s->line[i] = av_malloc_array(inlink->w, sizeof(*s->line[i]));
if (!s->line[i])
return AVERROR(ENOMEM);
}
for (i = 0; i < 4; i++) {
s->coefs[i] = precalc_coefs(s->strength[i], s->depth);
@ -287,14 +291,38 @@ static int config_input(AVFilterLink *inlink)
return 0;
}
typedef struct ThreadData {
AVFrame *in, *out;
int direct;
} ThreadData;
static int do_denoise(AVFilterContext *ctx, void *data, int job_nr, int n_jobs)
{
HQDN3DContext *s = ctx->priv;
const ThreadData *td = data;
AVFrame *out = td->out;
AVFrame *in = td->in;
int direct = td->direct;
denoise(s, in->data[job_nr], out->data[job_nr],
s->line[job_nr], &s->frame_prev[job_nr],
AV_CEIL_RSHIFT(in->width, (!!job_nr * s->hsub)),
AV_CEIL_RSHIFT(in->height, (!!job_nr * s->vsub)),
in->linesize[job_nr], out->linesize[job_nr],
s->coefs[job_nr ? CHROMA_SPATIAL : LUMA_SPATIAL],
s->coefs[job_nr ? CHROMA_TMP : LUMA_TMP]);
return 0;
}
static int filter_frame(AVFilterLink *inlink, AVFrame *in)
{
AVFilterContext *ctx = inlink->dst;
HQDN3DContext *s = ctx->priv;
AVFilterLink *outlink = ctx->outputs[0];
AVFrame *out;
int c, direct = av_frame_is_writable(in) && !ctx->is_disabled;
int direct = av_frame_is_writable(in) && !ctx->is_disabled;
ThreadData td;
if (direct) {
out = in;
@ -308,15 +336,11 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in)
av_frame_copy_props(out, in);
}
for (c = 0; c < 3; c++) {
denoise(s, in->data[c], out->data[c],
s->line, &s->frame_prev[c],
AV_CEIL_RSHIFT(in->width, (!!c * s->hsub)),
AV_CEIL_RSHIFT(in->height, (!!c * s->vsub)),
in->linesize[c], out->linesize[c],
s->coefs[c ? CHROMA_SPATIAL : LUMA_SPATIAL],
s->coefs[c ? CHROMA_TMP : LUMA_TMP]);
}
td.in = in;
td.out = out;
td.direct = direct;
/* one thread per plane */
ctx->internal->execute(ctx, do_denoise, &td, NULL, 3);
if (ctx->is_disabled) {
av_frame_free(&out);
@ -370,5 +394,5 @@ AVFilter ff_vf_hqdn3d = {
.query_formats = query_formats,
.inputs = avfilter_vf_hqdn3d_inputs,
.outputs = avfilter_vf_hqdn3d_outputs,
.flags = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL,
.flags = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL | AVFILTER_FLAG_SLICE_THREADS,
};

View File

@ -31,7 +31,7 @@
typedef struct HQDN3DContext {
const AVClass *class;
int16_t *coefs[4];
uint16_t *line;
uint16_t *line[3];
uint16_t *frame_prev[3];
double strength[4];
int hsub, vsub;

View File

@ -65,7 +65,7 @@ typedef struct ThreadData {
static int filter_slice(AVFilterContext *ctx, void *arg, int job, int nb_jobs)
{
ThreadData *td = (ThreadData*)arg;
ThreadData *td = arg;
AVFrame *in = td->in;
AVFrame *out = td->out;

View File

@ -81,6 +81,17 @@
#include "internal.h"
#include "video.h"
typedef struct NormalizeHistory {
uint8_t *history; // History entries.
uint32_t history_sum; // Sum of history entries.
} NormalizeHistory;
typedef struct NormalizeLocal {
uint8_t in; // Original input byte value for this frame.
float smoothed; // Smoothed input value [0,255].
float out; // Output value [0,255]
} NormalizeLocal;
typedef struct NormalizeContext {
const AVClass *class;
@ -98,10 +109,7 @@ typedef struct NormalizeContext {
int frame_num; // Increments on each frame, starting from 0.
// Per-extremum, per-channel history, for temporal smoothing.
struct {
uint8_t *history; // History entries.
uint32_t history_sum; // Sum of history entries.
} min[3], max[3]; // Min and max for each channel in {R,G,B}.
NormalizeHistory min[3], max[3]; // Min and max for each channel in {R,G,B}.
uint8_t *history_mem; // Single allocation for above history entries
} NormalizeContext;
@ -126,11 +134,7 @@ AVFILTER_DEFINE_CLASS(normalize);
static void normalize(NormalizeContext *s, AVFrame *in, AVFrame *out)
{
// Per-extremum, per-channel local variables.
struct {
uint8_t in; // Original input byte value for this frame.
float smoothed; // Smoothed input value [0,255].
float out; // Output value [0,255].
} min[3], max[3]; // Min and max for each channel in {R,G,B}.
NormalizeLocal min[3], max[3]; // Min and max for each channel in {R,G,B}.
float rgb_min_smoothed; // Min input range for linked normalization
float rgb_max_smoothed; // Max input range for linked normalization
@ -143,14 +147,12 @@ static void normalize(NormalizeContext *s, AVFrame *in, AVFrame *out)
min[c].in = max[c].in = in->data[0][s->co[c]];
for (y = 0; y < in->height; y++) {
uint8_t *inp = in->data[0] + y * in->linesize[0];
uint8_t *outp = out->data[0] + y * out->linesize[0];
for (x = 0; x < in->width; x++) {
for (c = 0; c < 3; c++) {
min[c].in = FFMIN(min[c].in, inp[s->co[c]]);
max[c].in = FFMAX(max[c].in, inp[s->co[c]]);
}
inp += s->step;
outp += s->step;
}
}

View File

@ -210,7 +210,7 @@ static int config_input(AVFilterLink *inlink)
return 0;
eval_fail:
av_log(NULL, AV_LOG_ERROR,
av_log(ctx, AV_LOG_ERROR,
"Error when evaluating the expression '%s'\n", expr);
return ret;

View File

@ -140,7 +140,7 @@ fail:
static int remap_planar##bits##_##name##_slice(AVFilterContext *ctx, void *arg, \
int jobnr, int nb_jobs) \
{ \
const ThreadData *td = (ThreadData*)arg; \
const ThreadData *td = arg; \
const AVFrame *in = td->in; \
const AVFrame *xin = td->xin; \
const AVFrame *yin = td->yin; \
@ -189,7 +189,7 @@ DEFINE_REMAP_PLANAR_FUNC(nearest, 16, 2)
static int remap_packed##bits##_##name##_slice(AVFilterContext *ctx, void *arg, \
int jobnr, int nb_jobs) \
{ \
const ThreadData *td = (ThreadData*)arg; \
const ThreadData *td = arg; \
const AVFrame *in = td->in; \
const AVFrame *xin = td->xin; \
const AVFrame *yin = td->yin; \

View File

@ -541,7 +541,7 @@ static int qsvscale_config_props(AVFilterLink *outlink)
return 0;
fail:
av_log(NULL, AV_LOG_ERROR,
av_log(ctx, AV_LOG_ERROR,
"Error when evaluating the expression '%s'\n", expr);
return ret;
}

View File

@ -136,5 +136,4 @@ AVFilter ff_vf_showpalette = {
.inputs = showpalette_inputs,
.outputs = showpalette_outputs,
.priv_class = &showpalette_class,
.flags = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC,
};

View File

@ -226,7 +226,7 @@ DEFINE_REMAP1_LINE(16, 2)
#define DEFINE_REMAP(ws, bits) \
static int remap##ws##_##bits##bit_slice(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) \
{ \
ThreadData *td = (ThreadData*)arg; \
ThreadData *td = arg; \
const V360Context *s = ctx->priv; \
const AVFrame *in = td->in; \
AVFrame *out = td->out; \

View File

@ -176,8 +176,8 @@ static void convolution_y_##bits##bit(const uint16_t *filter, int filt_w, \
} \
}
conv_y_fn(uint8_t, 8);
conv_y_fn(uint16_t, 10);
conv_y_fn(uint8_t, 8)
conv_y_fn(uint16_t, 10)
static void vmafmotiondsp_init(VMAFMotionDSPContext *dsp, int bpp) {
dsp->convolution_x = convolution_x;

View File

@ -37,9 +37,9 @@ static void FUNC_NAME(SCENE_SAD_PARAMS) { \
}
#if HAVE_X86ASM
SCENE_SAD_FUNC(scene_sad_sse2, ff_scene_sad_sse2, 16);
SCENE_SAD_FUNC(scene_sad_sse2, ff_scene_sad_sse2, 16)
#if HAVE_AVX2_EXTERNAL
SCENE_SAD_FUNC(scene_sad_avx2, ff_scene_sad_avx2, 32);
SCENE_SAD_FUNC(scene_sad_avx2, ff_scene_sad_avx2, 32)
#endif
#endif

View File

@ -117,8 +117,8 @@ static const AVMetadataConv avi_metadata_conv[] = {
static int avi_load_index(AVFormatContext *s);
static int guess_ni_flag(AVFormatContext *s);
#define print_tag(str, tag, size) \
av_log(NULL, AV_LOG_TRACE, "pos:%"PRIX64" %s: tag=%s size=0x%x\n", \
#define print_tag(s, str, tag, size) \
av_log(s, AV_LOG_TRACE, "pos:%"PRIX64" %s: tag=%s size=0x%x\n", \
avio_tell(pb), str, av_fourcc2str(tag), size) \
static inline int get_duration(AVIStream *ast, int len)
@ -504,7 +504,7 @@ static int avi_read_header(AVFormatContext *s)
tag = avio_rl32(pb);
size = avio_rl32(pb);
print_tag("tag", tag, size);
print_tag(s, "tag", tag, size);
switch (tag) {
case MKTAG('L', 'I', 'S', 'T'):
@ -512,7 +512,7 @@ static int avi_read_header(AVFormatContext *s)
/* Ignored, except at start of video packets. */
tag1 = avio_rl32(pb);
print_tag("list", tag1, 0);
print_tag(s, "list", tag1, 0);
if (tag1 == MKTAG('m', 'o', 'v', 'i')) {
avi->movi_list = avio_tell(pb) - 4;
@ -520,7 +520,7 @@ static int avi_read_header(AVFormatContext *s)
avi->movi_end = avi->movi_list + size + (size & 1);
else
avi->movi_end = avi->fsize;
av_log(NULL, AV_LOG_TRACE, "movi end=%"PRIx64"\n", avi->movi_end);
av_log(s, AV_LOG_TRACE, "movi end=%"PRIx64"\n", avi->movi_end);
goto end_of_header;
} else if (tag1 == MKTAG('I', 'N', 'F', 'O'))
ff_read_riff_info(s, size - 4);
@ -584,7 +584,7 @@ static int avi_read_header(AVFormatContext *s)
tag1 = stream_index ? MKTAG('a', 'u', 'd', 's')
: MKTAG('v', 'i', 'd', 's');
print_tag("strh", tag1, -1);
print_tag(s, "strh", tag1, -1);
if (tag1 == MKTAG('i', 'a', 'v', 's') ||
tag1 == MKTAG('i', 'v', 'a', 's')) {
@ -802,7 +802,7 @@ FF_ENABLE_DEPRECATION_WARNINGS
ast->has_pal = 1;
}
print_tag("video", tag1, 0);
print_tag(s, "video", tag1, 0);
st->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
st->codecpar->codec_tag = tag1;

View File

@ -26,6 +26,7 @@
#include "flac_picture.h"
#include "id3v2.h"
#include "internal.h"
#include "avio_internal.h"
int ff_flac_parse_picture(AVFormatContext *s, uint8_t *buf, int buf_size)
{
@ -33,15 +34,13 @@ int ff_flac_parse_picture(AVFormatContext *s, uint8_t *buf, int buf_size)
enum AVCodecID id = AV_CODEC_ID_NONE;
AVBufferRef *data = NULL;
uint8_t mimetype[64], *desc = NULL;
AVIOContext *pb = NULL;
AVIOContext pb0, *pb = &pb0;
AVStream *st;
int width, height, ret = 0;
int len;
unsigned int type;
pb = avio_alloc_context(buf, buf_size, 0, NULL, NULL, NULL, NULL);
if (!pb)
return AVERROR(ENOMEM);
ffio_init_context(pb, buf, buf_size, 0, NULL, NULL, NULL, NULL);
/* read the picture type */
type = avio_rb32(pb);
@ -145,14 +144,11 @@ int ff_flac_parse_picture(AVFormatContext *s, uint8_t *buf, int buf_size)
if (desc)
av_dict_set(&st->metadata, "title", desc, AV_DICT_DONT_STRDUP_VAL);
avio_context_free(&pb);
return 0;
fail:
av_buffer_unref(&data);
av_freep(&desc);
avio_context_free(&pb);
return ret;
}

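flac_picture.c now reads the attached picture through a stack AVIOContext initialized with ffio_init_context() instead of a heap context from avio_alloc_context(), which removes the ENOMEM path and the avio_context_free() calls on both exits. A hedged sketch of reading from an in-memory buffer this way (ffio_init_context() lives in the internal header avio_internal.h, so this only compiles inside libavformat; the helper name is illustrative):

#include "libavformat/avio_internal.h"

static void read_picture_prefix(uint8_t *buf, int buf_size,
                                unsigned *type, unsigned *mime_len)
{
    AVIOContext pb0, *pb = &pb0;

    /* read-only context over an existing buffer; nothing to free afterwards */
    ffio_init_context(pb, buf, buf_size, 0, NULL, NULL, NULL, NULL);

    *type     = avio_rb32(pb);   /* picture type        */
    *mime_len = avio_rb32(pb);   /* MIME string length  */
}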
View File

@ -594,7 +594,7 @@ static int ensure_playlist(HLSContext *c, struct playlist **pls, const char *url
}
static int open_url_keepalive(AVFormatContext *s, AVIOContext **pb,
const char *url)
const char *url, AVDictionary **options)
{
#if !CONFIG_HTTP_PROTOCOL
return AVERROR_PROTOCOL_NOT_FOUND;
@ -603,7 +603,7 @@ static int open_url_keepalive(AVFormatContext *s, AVIOContext **pb,
URLContext *uc = ffio_geturlcontext(*pb);
av_assert0(uc);
(*pb)->eof_reached = 0;
ret = ff_http_do_new_request(uc, url);
ret = ff_http_do_new_request2(uc, url, options);
if (ret < 0) {
ff_format_io_close(s, pb);
}
@ -656,7 +656,7 @@ static int open_url(AVFormatContext *s, AVIOContext **pb, const char *url,
av_dict_copy(&tmp, opts2, 0);
if (is_http && c->http_persistent && *pb) {
ret = open_url_keepalive(c->ctx, pb, url);
ret = open_url_keepalive(c->ctx, pb, url, &tmp);
if (ret == AVERROR_EXIT) {
av_dict_free(&tmp);
return ret;
@ -714,7 +714,7 @@ static int parse_playlist(HLSContext *c, const char *url,
if (is_http && !in && c->http_persistent && c->playlist_pb) {
in = c->playlist_pb;
ret = open_url_keepalive(c->ctx, &c->playlist_pb, url);
ret = open_url_keepalive(c->ctx, &c->playlist_pb, url, NULL);
if (ret == AVERROR_EXIT) {
return ret;
} else if (ret < 0) {
@ -1449,6 +1449,7 @@ reload:
if (c->http_multiple == 1 && v->input_next_requested) {
FFSWAP(AVIOContext *, v->input, v->input_next);
v->cur_seg_offset = 0;
v->input_next_requested = 0;
ret = 0;
} else {

View File

@ -486,7 +486,7 @@ static int hls_delete_old_segments(AVFormatContext *s, HLSContext *hls,
float playlist_duration = 0.0f;
int ret = 0, path_size, sub_path_size;
int segment_cnt = 0;
char *dirname = NULL, *p, *sub_path;
char *dirname = NULL, *sub_path;
char *path = NULL;
char *vtt_dirname = NULL;
AVDictionary *options = NULL;
@ -517,13 +517,8 @@ static int hls_delete_old_segments(AVFormatContext *s, HLSContext *hls,
}
if (segment && !hls->use_localtime_mkdir) {
dirname = hls->segment_filename ? av_strdup(hls->segment_filename): av_strdup(vs->avf->url);
if (!dirname) {
ret = AVERROR(ENOMEM);
goto fail;
}
p = (char *)av_basename(dirname);
*p = '\0';
char *dirname_r = hls->segment_filename ? av_strdup(hls->segment_filename): av_strdup(vs->avf->url);
dirname = (char*)av_dirname(dirname_r);
}
/* if %v is present in the file's directory
@ -542,7 +537,7 @@ static int hls_delete_old_segments(AVFormatContext *s, HLSContext *hls,
}
}
av_free(dirname);
av_freep(&dirname);
dirname = r_dirname;
}
@ -578,13 +573,8 @@ static int hls_delete_old_segments(AVFormatContext *s, HLSContext *hls,
}
if ((segment->sub_filename[0] != '\0')) {
vtt_dirname = av_strdup(vs->vtt_avf->url);
if (!vtt_dirname) {
ret = AVERROR(ENOMEM);
goto fail;
}
p = (char *)av_basename(vtt_dirname);
*p = '\0';
char *vtt_dirname_r = av_strdup(vs->vtt_avf->url);
vtt_dirname = (char*)av_dirname(vtt_dirname_r);
sub_path_size = strlen(segment->sub_filename) + 1 + strlen(vtt_dirname);
sub_path = av_malloc(sub_path_size);
if (!sub_path) {
@ -600,7 +590,7 @@ static int hls_delete_old_segments(AVFormatContext *s, HLSContext *hls,
if ((ret = vs->vtt_avf->io_open(vs->vtt_avf, &out, sub_path, AVIO_FLAG_WRITE, &options)) < 0) {
if (hls->ignore_io_errors)
ret = 0;
av_free(sub_path);
av_freep(&sub_path);
goto fail;
}
ff_format_io_close(vs->vtt_avf, &out);
@ -608,18 +598,18 @@ static int hls_delete_old_segments(AVFormatContext *s, HLSContext *hls,
av_log(hls, AV_LOG_ERROR, "failed to delete old segment %s: %s\n",
sub_path, strerror(errno));
}
av_free(sub_path);
av_freep(&sub_path);
}
av_freep(&path);
previous_segment = segment;
segment = previous_segment->next;
av_free(previous_segment);
av_freep(&previous_segment);
}
fail:
av_free(path);
av_free(dirname);
av_free(vtt_dirname);
av_freep(&path);
av_freep(&dirname);
av_freep(&vtt_dirname);
return ret;
}
@ -897,7 +887,7 @@ static int sls_flags_filename_process(struct AVFormatContext *s, HLSContext *hls
strlen(vs->current_segment_final_filename_fmt)) {
char * new_url = av_strdup(vs->current_segment_final_filename_fmt);
if (!new_url) {
av_free(en);
av_freep(&en);
return AVERROR(ENOMEM);
}
ff_format_set_url(vs->avf, new_url);
@ -908,8 +898,8 @@ static int sls_flags_filename_process(struct AVFormatContext *s, HLSContext *hls
"Invalid second level segment filename template '%s', "
"you can try to remove second_level_segment_size flag\n",
vs->avf->url);
av_free(filename);
av_free(en);
av_freep(&filename);
av_freep(&en);
return AVERROR(EINVAL);
}
ff_format_set_url(vs->avf, filename);
@ -922,8 +912,8 @@ static int sls_flags_filename_process(struct AVFormatContext *s, HLSContext *hls
"Invalid second level segment filename template '%s', "
"you can try to remove second_level_segment_time flag\n",
vs->avf->url);
av_free(filename);
av_free(en);
av_freep(&filename);
av_freep(&en);
return AVERROR(EINVAL);
}
ff_format_set_url(vs->avf, filename);
@ -995,7 +985,7 @@ static int sls_flag_use_localtime_filename(AVFormatContext *oc, HLSContext *c, V
av_log(c, AV_LOG_ERROR, "Invalid second level segment filename template '%s', "
"you can try to remove second_level_segment_index flag\n",
oc->url);
av_free(filename);
av_freep(&filename);
return AVERROR(EINVAL);
}
ff_format_set_url(oc, filename);
@ -1009,7 +999,7 @@ static int sls_flag_use_localtime_filename(AVFormatContext *oc, HLSContext *c, V
av_log(c, AV_LOG_ERROR, "Invalid second level segment filename template '%s', "
"you can try to remove second_level_segment_size flag\n",
oc->url);
av_free(filename);
av_freep(&filename);
return AVERROR(EINVAL);
}
ff_format_set_url(oc, filename);
@ -1020,7 +1010,7 @@ static int sls_flag_use_localtime_filename(AVFormatContext *oc, HLSContext *c, V
av_log(c, AV_LOG_ERROR, "Invalid second level segment filename template '%s', "
"you can try to remove second_level_segment_time flag\n",
oc->url);
av_free(filename);
av_freep(&filename);
return AVERROR(EINVAL);
}
ff_format_set_url(oc, filename);
@ -1108,7 +1098,7 @@ static int hls_append_segment(struct AVFormatContext *s, HLSContext *hls,
if ((ret = hls_delete_old_segments(s, hls, vs)) < 0)
return ret;
} else
av_free(en);
av_freep(&en);
} else
vs->nb_entries++;
@ -1217,7 +1207,7 @@ static void hls_free_segments(HLSSegment *p)
while (p) {
en = p;
p = p->next;
av_free(en);
av_freep(&en);
}
}
@ -1305,7 +1295,7 @@ static int create_master_playlist(AVFormatContext *s,
ret = hlsenc_io_open(s, &hls->m3u8_out, temp_filename, &options);
av_dict_free(&options);
if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Failed to open master play list file '%s'\n",
av_log(s, AV_LOG_ERROR, "Failed to open master play list file '%s'\n",
temp_filename);
goto fail;
}
@ -1344,7 +1334,7 @@ static int create_master_playlist(AVFormatContext *s,
m3u8_rel_name = get_relative_url(hls->master_m3u8_url, vs->m3u8_name);
if (!m3u8_rel_name) {
av_log(NULL, AV_LOG_ERROR, "Unable to find relative URL\n");
av_log(s, AV_LOG_ERROR, "Unable to find relative URL\n");
goto fail;
}
@ -1358,7 +1348,7 @@ static int create_master_playlist(AVFormatContext *s,
}
if (!vid_st && !aud_st) {
av_log(NULL, AV_LOG_WARNING, "Media stream not found\n");
av_log(s, AV_LOG_WARNING, "Media stream not found\n");
continue;
}
@ -1399,7 +1389,7 @@ static int create_master_playlist(AVFormatContext *s,
}
}
if (j == hls->nb_ccstreams)
av_log(NULL, AV_LOG_WARNING, "mapping ccgroup %s not found\n",
av_log(s, AV_LOG_WARNING, "mapping ccgroup %s not found\n",
vs->ccgroup);
}
@ -1584,7 +1574,7 @@ static int hls_start(AVFormatContext *s, VariantStream *vs)
#else
vs->basename, 'd', vs->sequence) < 1) {
#endif
av_free(filename);
av_freep(&filename);
av_log(oc, AV_LOG_ERROR, "Invalid segment filename template '%s', you can try to use -strftime 1 with it\n", vs->basename);
return AVERROR(EINVAL);
}
@ -1613,16 +1603,13 @@ static int hls_start(AVFormatContext *s, VariantStream *vs)
if (c->use_localtime_mkdir) {
const char *dir;
char *fn_copy = av_strdup(oc->url);
if (!fn_copy) {
return AVERROR(ENOMEM);
}
dir = av_dirname(fn_copy);
if (ff_mkdir_p(dir) == -1 && errno != EEXIST) {
av_log(oc, AV_LOG_ERROR, "Could not create directory %s with use_localtime_mkdir\n", dir);
av_free(fn_copy);
av_freep(&fn_copy);
return AVERROR(errno);
}
av_free(fn_copy);
av_freep(&fn_copy);
}
} else {
char *filename = NULL;
@ -1632,7 +1619,7 @@ static int hls_start(AVFormatContext *s, VariantStream *vs)
#else
vs->basename, 'd', vs->sequence) < 1) {
#endif
av_free(filename);
av_freep(&filename);
av_log(oc, AV_LOG_ERROR, "Invalid segment filename template '%s' you can try to use -strftime 1 with it\n", vs->basename);
return AVERROR(EINVAL);
}
@ -1646,7 +1633,7 @@ static int hls_start(AVFormatContext *s, VariantStream *vs)
#else
vs->vtt_basename, 'd', vs->sequence) < 1) {
#endif
av_free(filename);
av_freep(&filename);
av_log(vtt_oc, AV_LOG_ERROR, "Invalid segment filename template '%s'\n", vs->vtt_basename);
return AVERROR(EINVAL);
}
@ -1784,11 +1771,6 @@ static int validate_name(int nb_vs, const char *fn)
}
fn_dup = av_strdup(fn);
if (!fn_dup) {
ret = AVERROR(ENOMEM);
goto fail;
}
filename = av_basename(fn);
subdir_name = av_dirname(fn_dup);
@ -1846,11 +1828,6 @@ static int format_name(const char *buf, char **s, int index, const char *varname
/* if %v is present in the file's directory, create sub-directory */
if (av_stristr(dir, "%v") && proto && !strcmp(proto, "file")) {
mod_buf_dup = av_strdup(*s);
if (!mod_buf_dup) {
ret = AVERROR(ENOMEM);
goto fail;
}
dir = av_dirname(mod_buf_dup);
if (ff_mkdir_p(dir) == -1 && errno != EEXIST) {
ret = AVERROR(errno);
@ -2124,7 +2101,7 @@ static int update_variant_stream_info(AVFormatContext *s)
hls->var_streams[0].streams = av_mallocz(sizeof(AVStream *) *
hls->var_streams[0].nb_streams);
if (!hls->var_streams[0].streams) {
av_free(hls->var_streams);
av_freep(&hls->var_streams);
return AVERROR(ENOMEM);
}
@ -2132,7 +2109,7 @@ static int update_variant_stream_info(AVFormatContext *s)
if (hls->nb_ccstreams) {
hls->var_streams[0].ccgroup = av_strdup(hls->cc_streams[0].ccgroup);
if (!hls->var_streams[0].ccgroup) {
av_free(hls->var_streams);
av_freep(&hls->var_streams);
return AVERROR(ENOMEM);
}
}
@ -2151,11 +2128,6 @@ static int update_master_pl_info(AVFormatContext *s)
int ret = 0;
fn1 = av_strdup(s->url);
if (!fn1) {
ret = AVERROR(ENOMEM);
goto fail;
}
dir = av_dirname(fn1);
/**
@ -2164,10 +2136,6 @@ static int update_master_pl_info(AVFormatContext *s)
*/
if (dir && av_stristr(av_basename(dir), "%v")) {
fn2 = av_strdup(dir);
if (!fn2) {
ret = AVERROR(ENOMEM);
goto fail;
}
dir = av_dirname(fn2);
}
@ -2338,7 +2306,7 @@ static int hls_write_packet(AVFormatContext *s, AVPacket *pkt)
if (!vs->init_range_length) {
range_length = avio_close_dyn_buf(oc->pb, &buffer);
avio_write(vs->out, buffer, range_length);
av_free(buffer);
av_freep(&buffer);
vs->init_range_length = range_length;
avio_open_dyn_buf(&oc->pb);
vs->packets_written = 0;
@ -2407,8 +2375,8 @@ static int hls_write_packet(AVFormatContext *s, AVPacket *pkt)
reflush_dynbuf(vs, &range_length);
ret = hlsenc_io_close(s, &vs->out, filename);
}
av_free(vs->temp_buffer);
av_free(filename);
av_freep(&vs->temp_buffer);
av_freep(&filename);
}
}
@ -2426,7 +2394,7 @@ static int hls_write_packet(AVFormatContext *s, AVPacket *pkt)
vs->end_pts = pkt->pts;
vs->duration = 0;
if (ret < 0) {
av_free(old_filename);
av_freep(&old_filename);
return ret;
}
}
@ -2438,7 +2406,7 @@ static int hls_write_packet(AVFormatContext *s, AVPacket *pkt)
ff_format_io_close(s, &vs->out);
vs->out = NULL;
if ((ret = hls_window(s, 0, vs)) < 0) {
av_free(old_filename);
av_freep(&old_filename);
return ret;
}
}
@ -2464,7 +2432,7 @@ static int hls_write_packet(AVFormatContext *s, AVPacket *pkt)
sls_flag_file_rename(hls, vs, old_filename);
ret = hls_start(s, vs);
}
av_free(old_filename);
av_freep(&old_filename);
if (ret < 0) {
return ret;
@ -2551,7 +2519,7 @@ static int hls_write_trailer(struct AVFormatContext *s)
filename = av_asprintf("%s", oc->url);
}
if (!filename) {
av_free(old_filename);
av_freep(&old_filename);
return AVERROR(ENOMEM);
}
@ -2564,7 +2532,7 @@ static int hls_write_trailer(struct AVFormatContext *s)
range_length = avio_close_dyn_buf(oc->pb, &buffer);
avio_write(vs->out, buffer, range_length);
av_free(buffer);
av_freep(&buffer);
vs->init_range_length = range_length;
avio_open_dyn_buf(&oc->pb);
vs->packets_written = 0;
@ -2606,10 +2574,10 @@ static int hls_write_trailer(struct AVFormatContext *s)
if (ret < 0)
av_log(s, AV_LOG_WARNING, "Failed to upload file '%s' at the end.\n", oc->url);
}
av_free(vs->temp_buffer);
av_freep(&vs->temp_buffer);
failed:
av_free(filename);
av_freep(&filename);
av_write_trailer(oc);
if (oc->url[0]) {
proto = avio_find_protocol_name(oc->url);
@ -2619,7 +2587,7 @@ failed:
// rename that segment from .tmp to the real one
if (use_temp_file && !(hls->flags & HLS_SINGLE_FILE)) {
hls_rename_temp_file(s, oc);
av_free(old_filename);
av_freep(&old_filename);
old_filename = av_strdup(oc->url);
if (!old_filename) {

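Most of the hlsenc.c churn swaps av_free() for av_freep(), which takes the address of the pointer and resets it to NULL after freeing, so repeated frees or later NULL checks on the same variable stay well defined. Minimal sketch:

#include "libavutil/mem.h"

static void drop_name(char **name)
{
    av_freep(name);   /* frees *name and sets *name = NULL   */
    av_freep(name);   /* second call is now a harmless no-op */
}

/* by contrast, av_free(*name) leaves *name dangling, and freeing the same
 * non-NULL pointer twice is undefined behaviour */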
View File

@ -320,8 +320,11 @@ int ff_http_get_shutdown_status(URLContext *h)
return ret;
}
int ff_http_do_new_request(URLContext *h, const char *uri) {
return ff_http_do_new_request2(h, uri, NULL);
}
int ff_http_do_new_request(URLContext *h, const char *uri)
int ff_http_do_new_request2(URLContext *h, const char *uri, AVDictionary **opts)
{
HTTPContext *s = h->priv_data;
AVDictionary *options = NULL;
@ -366,6 +369,9 @@ int ff_http_do_new_request(URLContext *h, const char *uri)
if (!s->location)
return AVERROR(ENOMEM);
if ((ret = av_opt_set_dict(s, opts)) < 0)
return ret;
av_log(s, AV_LOG_INFO, "Opening \'%s\' for %s\n", uri, h->flags & AVIO_FLAG_WRITE ? "writing" : "reading");
ret = http_open_cnx(h, &options);
av_dict_free(&options);

View File

@ -56,6 +56,19 @@ int ff_http_get_shutdown_status(URLContext *h);
*/
int ff_http_do_new_request(URLContext *h, const char *uri);
/**
* Send a new HTTP request, reusing the old connection.
*
* @param h pointer to the resource
* @param uri uri used to perform the request
* @param options A dictionary filled with HTTP options. On return
* this parameter will be destroyed and replaced with a dict containing options
* that were not found. May be NULL.
* @return a negative value if an error condition occurred, 0
* otherwise
*/
int ff_http_do_new_request2(URLContext *h, const char *uri, AVDictionary **options);
int ff_http_averror(int status_code, int default_averror);
#endif /* AVFORMAT_HTTP_H */

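A hedged sketch of how a caller such as the HLS demuxer above might reuse a persistent HTTP connection while passing per-request options through the new entry point; ffio_geturlcontext() is the internal helper hls.c already uses, and the "offset" option key is purely illustrative:

#include "libavformat/avio_internal.h"   /* ffio_geturlcontext(), internal      */
#include "libavformat/http.h"            /* ff_http_do_new_request2(), internal */
#include "libavutil/dict.h"

static int reuse_http_connection(AVIOContext *pb, const char *url)
{
    URLContext *uc = ffio_geturlcontext(pb);
    AVDictionary *opts = NULL;
    int ret;

    av_dict_set(&opts, "offset", "0", 0);           /* illustrative option only            */

    pb->eof_reached = 0;
    ret = ff_http_do_new_request2(uc, url, &opts);  /* opts == NULL keeps the old behaviour */

    av_dict_free(&opts);                            /* entries the request did not consume  */
    return ret;
}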
View File

@ -529,8 +529,11 @@ static int iff_read_header(AVFormatContext *s)
st->codecpar->extradata = av_malloc(data_size + IFF_EXTRA_VIDEO_SIZE + AV_INPUT_BUFFER_PADDING_SIZE);
if (!st->codecpar->extradata)
return AVERROR(ENOMEM);
if (avio_read(pb, st->codecpar->extradata + IFF_EXTRA_VIDEO_SIZE, data_size) < 0)
if (avio_read(pb, st->codecpar->extradata + IFF_EXTRA_VIDEO_SIZE, data_size) < 0) {
av_freep(&st->codecpar->extradata);
st->codecpar->extradata_size = 0;
return AVERROR(EIO);
}
break;
case ID_BMHD:

View File

@ -53,7 +53,8 @@ static int read_header(AVFormatContext *s)
st->codecpar->height = avio_rl16(s->pb);
time_base.den = avio_rl32(s->pb);
time_base.num = avio_rl32(s->pb);
st->duration = avio_rl64(s->pb);
st->duration = avio_rl32(s->pb);
avio_skip(s->pb, 4); // unused
st->need_parsing = AVSTREAM_PARSE_HEADERS;

View File

@ -53,7 +53,7 @@ static int ivf_write_header(AVFormatContext *s)
avio_wl16(pb, par->height);
avio_wl32(pb, s->streams[0]->time_base.den);
avio_wl32(pb, s->streams[0]->time_base.num);
avio_wl64(pb, 0xFFFFFFFFFFFFFFFFULL);
avio_wl64(pb, 0xFFFFFFFFFFFFFFFFULL); // length is overwritten at the end of muxing
return 0;
}
@ -83,7 +83,9 @@ static int ivf_write_trailer(AVFormatContext *s)
size_t end = avio_tell(pb);
avio_seek(pb, 24, SEEK_SET);
avio_wl64(pb, ctx->frame_cnt * ctx->sum_delta_pts / (ctx->frame_cnt - 1));
// overwrite the "length" field (duration)
avio_wl32(pb, ctx->frame_cnt * ctx->sum_delta_pts / (ctx->frame_cnt - 1));
avio_wl32(pb, 0); // zero out unused bytes
avio_seek(pb, end, SEEK_SET);
}

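The muxer change matches the IVF header layout: the 64-bit placeholder written at offset 24 actually covers a 32-bit frame-count/duration field plus 4 unused bytes, so the trailer now patches it with two 32-bit writes. A hedged sketch of that patch-up step (frame_cnt and sum_delta_pts mirror the fields used in the hunk above):

#include "libavformat/avio.h"

static void ivf_patch_duration(AVIOContext *pb, uint64_t frame_cnt, uint64_t sum_delta_pts)
{
    int64_t end = avio_tell(pb);

    if ((pb->seekable & AVIO_SEEKABLE_NORMAL) && frame_cnt > 1) {
        avio_seek(pb, 24, SEEK_SET);                                 /* "length" field           */
        avio_wl32(pb, frame_cnt * sum_delta_pts / (frame_cnt - 1));  /* average duration         */
        avio_wl32(pb, 0);                                            /* zero the 4 unused bytes  */
        avio_seek(pb, end, SEEK_SET);
    }
}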
View File

@ -113,9 +113,10 @@ static int read_header(AVFormatContext *s)
return AVERROR(ENOMEM);
jv->frames = av_malloc(ast->nb_index_entries * sizeof(JVFrame));
if (!jv->frames)
if (!jv->frames) {
av_freep(&ast->index_entries);
return AVERROR(ENOMEM);
}
offset = 0x68 + ast->nb_index_entries * 16;
for (i = 0; i < ast->nb_index_entries; i++) {
AVIndexEntry *e = ast->index_entries + i;
@ -137,6 +138,8 @@ static int read_header(AVFormatContext *s)
- jvf->palette_size < 0) {
if (s->error_recognition & AV_EF_EXPLODE) {
read_close(s);
av_freep(&jv->frames);
av_freep(&ast->index_entries);
return AVERROR_INVALIDDATA;
}
jvf->audio_size =

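The jv.c and iff.c hunks make the failure paths release what earlier steps already allocated, so a truncated header no longer leaks the index entries or leaves stale extradata behind. A generic hedged sketch of the pattern (names illustrative):

#include <errno.h>
#include "libavutil/error.h"
#include "libavutil/mem.h"

static int alloc_two_arrays(int n, int **a_out, int **b_out)
{
    int *a = av_malloc_array(n, sizeof(*a));
    int *b;

    if (!a)
        return AVERROR(ENOMEM);

    b = av_malloc_array(n, sizeof(*b));
    if (!b) {
        av_freep(&a);              /* undo the allocation that already succeeded */
        return AVERROR(ENOMEM);
    }

    *a_out = a;
    *b_out = b;
    return 0;
}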
View File

@ -60,7 +60,7 @@ int ff_mms_asf_header_parser(MMSContext *mms)
if (mms->asf_header_size < sizeof(ff_asf_guid) * 2 + 22 ||
memcmp(p, ff_asf_header, sizeof(ff_asf_guid))) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Corrupt stream (invalid ASF header, size=%d)\n",
mms->asf_header_size);
return AVERROR_INVALIDDATA;
@ -77,7 +77,7 @@ int ff_mms_asf_header_parser(MMSContext *mms)
chunksize = AV_RL64(p + sizeof(ff_asf_guid));
}
if (!chunksize || chunksize > end - p) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Corrupt stream (header chunksize %"PRId64" is invalid)\n",
chunksize);
return AVERROR_INVALIDDATA;
@ -87,7 +87,7 @@ int ff_mms_asf_header_parser(MMSContext *mms)
if (end - p > sizeof(ff_asf_guid) * 2 + 68) {
mms->asf_packet_len = AV_RL32(p + sizeof(ff_asf_guid) * 2 + 64);
if (mms->asf_packet_len <= 0 || mms->asf_packet_len > sizeof(mms->in_buffer)) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Corrupt stream (too large pkt_len %d)\n",
mms->asf_packet_len);
return AVERROR_INVALIDDATA;
@ -110,7 +110,7 @@ int ff_mms_asf_header_parser(MMSContext *mms)
mms->streams[mms->stream_num].id = stream_id;
mms->stream_num++;
} else {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Corrupt stream (too many A/V streams)\n");
return AVERROR_INVALIDDATA;
}
@ -121,7 +121,7 @@ int ff_mms_asf_header_parser(MMSContext *mms)
uint64_t skip_bytes = 88;
while (stream_count--) {
if (end - p < skip_bytes + 4) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Corrupt stream (next stream name length is not in the buffer)\n");
return AVERROR_INVALIDDATA;
}
@ -129,14 +129,14 @@ int ff_mms_asf_header_parser(MMSContext *mms)
}
while (ext_len_count--) {
if (end - p < skip_bytes + 22) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Corrupt stream (next extension system info length is not in the buffer)\n");
return AVERROR_INVALIDDATA;
}
skip_bytes += 22 + AV_RL32(p + skip_bytes + 18);
}
if (end - p < skip_bytes) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Corrupt stream (the last extension system info length is invalid)\n");
return AVERROR_INVALIDDATA;
}
@ -146,7 +146,7 @@ int ff_mms_asf_header_parser(MMSContext *mms)
} else if (!memcmp(p, ff_asf_head1_guid, sizeof(ff_asf_guid))) {
chunksize = 46; // see references [2] section 3.4. This should be set 46.
if (chunksize > end - p) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Corrupt stream (header chunksize %"PRId64" is invalid)\n",
chunksize);
return AVERROR_INVALIDDATA;

View File

@ -141,7 +141,7 @@ static int send_command_packet(MMSTContext *mmst)
// write it out.
write_result= ffurl_write(mms->mms_hd, mms->out_buffer, exact_length);
if(write_result != exact_length) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Failed to write data of length %d: %d (%s)\n",
exact_length, write_result,
write_result < 0 ? strerror(AVUNERROR(write_result)) :
@ -215,11 +215,11 @@ static int send_media_file_request(MMSTContext *mmst)
static void handle_packet_stream_changing_type(MMSTContext *mmst)
{
MMSContext *mms = &mmst->mms;
av_log(NULL, AV_LOG_TRACE, "Stream changing!\n");
av_log(mms->mms_hd, AV_LOG_TRACE, "Stream changing!\n");
// 40 is the packet header size, 7 is the prefix size.
mmst->header_packet_id= AV_RL32(mms->in_buffer + 40 + 7);
av_log(NULL, AV_LOG_TRACE, "Changed header prefix to 0x%x", mmst->header_packet_id);
av_log(mms->mms_hd, AV_LOG_TRACE, "Changed header prefix to 0x%x", mmst->header_packet_id);
}
static int send_keepalive_packet(MMSTContext *mmst)
@ -251,12 +251,12 @@ static MMSSCPacketType get_tcp_server_response(MMSTContext *mmst)
read_result = ffurl_read_complete(mms->mms_hd, mms->in_buffer, 8);
if (read_result != 8) {
if(read_result < 0) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Error reading packet header: %d (%s)\n",
read_result, strerror(AVUNERROR(read_result)));
packet_type = SC_PKT_CANCEL;
} else {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"The server closed the connection\n");
packet_type = SC_PKT_NO_DATA;
}
@ -270,7 +270,7 @@ static MMSSCPacketType get_tcp_server_response(MMSTContext *mmst)
mmst->incoming_flags= mms->in_buffer[3];
read_result= ffurl_read_complete(mms->mms_hd, mms->in_buffer+8, 4);
if(read_result != 4) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Reading command packet length failed: %d (%s)\n",
read_result,
read_result < 0 ? strerror(AVUNERROR(read_result)) :
@ -279,11 +279,11 @@ static MMSSCPacketType get_tcp_server_response(MMSTContext *mmst)
}
length_remaining= AV_RL32(mms->in_buffer+8) + 4;
av_log(NULL, AV_LOG_TRACE, "Length remaining is %d\n", length_remaining);
av_log(mms->mms_hd, AV_LOG_TRACE, "Length remaining is %d\n", length_remaining);
// read the rest of the packet.
if (length_remaining < 0
|| length_remaining > sizeof(mms->in_buffer) - 12) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Incoming packet length %d exceeds bufsize %"SIZE_SPECIFIER"\n",
length_remaining, sizeof(mms->in_buffer) - 12);
return AVERROR_INVALIDDATA;
@ -291,7 +291,7 @@ static MMSSCPacketType get_tcp_server_response(MMSTContext *mmst)
read_result = ffurl_read_complete(mms->mms_hd, mms->in_buffer + 12,
length_remaining) ;
if (read_result != length_remaining) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Reading pkt data (length=%d) failed: %d (%s)\n",
length_remaining, read_result,
read_result < 0 ? strerror(AVUNERROR(read_result)) :
@ -300,7 +300,7 @@ static MMSSCPacketType get_tcp_server_response(MMSTContext *mmst)
}
packet_type= AV_RL16(mms->in_buffer+36);
if (read_result >= 44 && (hr = AV_RL32(mms->in_buffer + 40))) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Server sent a message with packet type 0x%x and error status code 0x%08x\n", packet_type, hr);
return AVERROR(EINVAL);
}
@ -319,7 +319,7 @@ static MMSSCPacketType get_tcp_server_response(MMSTContext *mmst)
if (length_remaining < 0
|| length_remaining > sizeof(mms->in_buffer) - 8) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Data length %d is invalid or too large (max=%"SIZE_SPECIFIER")\n",
length_remaining, sizeof(mms->in_buffer));
return AVERROR_INVALIDDATA;
@ -328,7 +328,7 @@ static MMSSCPacketType get_tcp_server_response(MMSTContext *mmst)
mms->read_in_ptr = mms->in_buffer;
read_result= ffurl_read_complete(mms->mms_hd, mms->in_buffer, length_remaining);
if(read_result != length_remaining) {
av_log(NULL, AV_LOG_ERROR,
av_log(mms->mms_hd, AV_LOG_ERROR,
"Failed to read packet data of size %d: %d (%s)\n",
length_remaining, read_result,
read_result < 0 ? strerror(AVUNERROR(read_result)) :
@ -358,7 +358,7 @@ static MMSSCPacketType get_tcp_server_response(MMSTContext *mmst)
} else if(packet_id_type == mmst->packet_id) {
packet_type = SC_PKT_ASF_MEDIA;
} else {
av_log(NULL, AV_LOG_TRACE, "packet id type %d is old.", packet_id_type);
av_log(mms->mms_hd, AV_LOG_TRACE, "packet id type %d is old.", packet_id_type);
continue;
}
}
@ -555,14 +555,14 @@ static int mms_open(URLContext *h, const char *uri, int flags)
if (err)
goto fail;
if((mmst->incoming_flags != 0X08) && (mmst->incoming_flags != 0X0C)) {
av_log(NULL, AV_LOG_ERROR,
av_log(h, AV_LOG_ERROR,
"The server does not support MMST (try MMSH or RTSP)\n");
err = AVERROR(EINVAL);
goto fail;
}
err = ff_mms_asf_header_parser(mms);
if (err) {
av_log(NULL, AV_LOG_TRACE, "asf header parsed failed!\n");
av_log(h, AV_LOG_TRACE, "asf header parsed failed!\n");
goto fail;
}
mms->header_parsed = 1;
@ -579,11 +579,11 @@ static int mms_open(URLContext *h, const char *uri, int flags)
if (err) {
goto fail;
}
av_log(NULL, AV_LOG_TRACE, "Leaving open (success)\n");
av_log(h, AV_LOG_TRACE, "Leaving open (success)\n");
return 0;
fail:
mms_close(h);
av_log(NULL, AV_LOG_TRACE, "Leaving open (failure: %d)\n", err);
av_log(mms->mms_hd, AV_LOG_TRACE, "Leaving open (failure: %d)\n", err);
return err;
}
@ -608,7 +608,7 @@ static int mms_read(URLContext *h, uint8_t *buf, int size)
int err = mms_safe_send_recv(mmst, NULL, SC_PKT_ASF_MEDIA);
if (err == 0) {
if(mms->remaining_in_len>mms->asf_packet_len) {
av_log(NULL, AV_LOG_ERROR,
av_log(h, AV_LOG_ERROR,
"Incoming pktlen %d is larger than ASF pktsize %d\n",
mms->remaining_in_len, mms->asf_packet_len);
result= AVERROR(EIO);
@ -616,12 +616,12 @@ static int mms_read(URLContext *h, uint8_t *buf, int size)
// copy the data to the packet buffer.
result = ff_mms_read_data(mms, buf, size);
if (result == 0) {
av_log(NULL, AV_LOG_TRACE, "Read ASF media packet size is zero!\n");
av_log(h, AV_LOG_TRACE, "Read ASF media packet size is zero!\n");
break;
}
}
} else {
av_log(NULL, AV_LOG_TRACE, "read packet error!\n");
av_log(h, AV_LOG_TRACE, "read packet error!\n");
break;
}
}

View File

@ -489,7 +489,6 @@ static int mpegps_read_packet(AVFormatContext *s,
MpegDemuxContext *m = s->priv_data;
AVStream *st;
int len, startcode, i, es_type, ret;
int lpcm_header_len = -1; //Init to suppress warning
int pcm_dvd = 0;
int request_probe= 0;
enum AVCodecID codec_id = AV_CODEC_ID_NONE;
@ -507,8 +506,7 @@ redo:
if (!m->raw_ac3) {
/* audio: skip header */
avio_r8(s->pb);
lpcm_header_len = avio_rb16(s->pb);
avio_skip(s->pb, 3);
len -= 3;
if (startcode >= 0xb0 && startcode <= 0xbf) {
/* MLP/TrueHD audio has a 4-byte header */

View File

@ -163,6 +163,9 @@ static int mpsub_read_header(AVFormatContext *s)
ff_subtitles_queue_finalize(s, &mpsub->q);
end:
if (res < 0)
ff_subtitles_queue_clean(&mpsub->q);
av_bprint_finalize(&buf, NULL);
return res;
}

View File

@ -238,7 +238,7 @@ int ff_accept(int fd, int timeout, URLContext *h)
if (ret < 0)
return ff_neterrno();
if (ff_socket_nonblock(ret, 1) < 0)
av_log(NULL, AV_LOG_DEBUG, "ff_socket_nonblock failed\n");
av_log(h, AV_LOG_DEBUG, "ff_socket_nonblock failed\n");
return ret;
}
@ -264,7 +264,7 @@ int ff_listen_connect(int fd, const struct sockaddr *addr,
socklen_t optlen;
if (ff_socket_nonblock(fd, 1) < 0)
av_log(NULL, AV_LOG_DEBUG, "ff_socket_nonblock failed\n");
av_log(h, AV_LOG_DEBUG, "ff_socket_nonblock failed\n");
while ((ret = connect(fd, addr, addrlen))) {
ret = ff_neterrno();

View File

@ -2386,7 +2386,7 @@ static int handle_metadata(RTMPContext *rt, RTMPPacket *pkt)
next += size + 3 + 4;
}
if (p != rt->flv_data + rt->flv_size) {
av_log(NULL, AV_LOG_WARNING, "Incomplete flv packets in "
av_log(rt, AV_LOG_WARNING, "Incomplete flv packets in "
"RTMP_PT_METADATA packet\n");
rt->flv_size = p - rt->flv_data;
}

View File

@ -274,7 +274,7 @@ static int udp_set_multicast_sources(URLContext *h,
}
return 0;
#else
av_log(NULL, AV_LOG_ERROR,
av_log(h, AV_LOG_ERROR,
"Setting multicast sources only supported for IPv4\n");
return AVERROR(EINVAL);
#endif
@ -283,7 +283,7 @@ static int udp_set_multicast_sources(URLContext *h,
for (i = 0; i < nb_sources; i++) {
struct ip_mreq_source mreqs;
if (sources[i].ss_family != AF_INET) {
av_log(NULL, AV_LOG_ERROR, "Source/block address %d is of incorrect protocol family\n", i + 1);
av_log(h, AV_LOG_ERROR, "Source/block address %d is of incorrect protocol family\n", i + 1);
return AVERROR(EINVAL);
}
@ -298,9 +298,9 @@ static int udp_set_multicast_sources(URLContext *h,
include ? IP_ADD_SOURCE_MEMBERSHIP : IP_BLOCK_SOURCE,
(const void *)&mreqs, sizeof(mreqs)) < 0) {
if (include)
ff_log_net_error(NULL, AV_LOG_ERROR, "setsockopt(IP_ADD_SOURCE_MEMBERSHIP)");
ff_log_net_error(h, AV_LOG_ERROR, "setsockopt(IP_ADD_SOURCE_MEMBERSHIP)");
else
ff_log_net_error(NULL, AV_LOG_ERROR, "setsockopt(IP_BLOCK_SOURCE)");
ff_log_net_error(h, AV_LOG_ERROR, "setsockopt(IP_BLOCK_SOURCE)");
return ff_neterrno();
}
}

View File

@ -257,8 +257,12 @@ char *av_strireplace(const char *str, const char *from, const char *to)
const char *av_basename(const char *path)
{
char *p = strrchr(path, '/');
char *p;
if (!path || *path == '\0')
return ".";
p = strrchr(path, '/');
#if HAVE_DOS_PATHS
char *q = strrchr(path, '\\');
char *d = strchr(path, ':');
@ -274,11 +278,11 @@ const char *av_basename(const char *path)
const char *av_dirname(char *path)
{
char *p = strrchr(path, '/');
char *p = path ? strrchr(path, '/') : NULL;
#if HAVE_DOS_PATHS
char *q = strrchr(path, '\\');
char *d = strchr(path, ':');
char *q = path ? strrchr(path, '\\') : NULL;
char *d = path ? strchr(path, ':') : NULL;
d = d ? d + 1 : d;

View File

@ -274,16 +274,21 @@ char *av_strireplace(const char *str, const char *from, const char *to);
/**
* Thread safe basename.
* @param path the path, on DOS both \ and / are considered separators.
* @param path the string to parse, on DOS both \ and / are considered separators.
* @return pointer to the basename substring.
* If path does not contain a slash, the function returns a copy of path.
* If path is a NULL pointer or points to an empty string, a pointer
* to a string "." is returned.
*/
const char *av_basename(const char *path);
/**
* Thread safe dirname.
* @param path the path, on DOS both \ and / are considered separators.
* @return the path with the separator replaced by the string terminator or ".".
* @note the function may change the input string.
* @param path the string to parse, on DOS both \ and / are considered separators.
* @return A pointer to a string that's the parent directory of path.
* If path is a NULL pointer or points to an empty string, a pointer
* to a string "." is returned.
* @note the function may modify the contents of the path, so copies should be passed.
*/
const char *av_dirname(char *path);

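With the avstring.c change above, both helpers now accept NULL or empty input and return ".", which is what let hlsenc.c drop its av_strdup() NULL checks before calling av_dirname(). A hedged usage sketch; the commented results follow the updated documentation:

#include <stdio.h>
#include "libavutil/avstring.h"
#include "libavutil/mem.h"

int main(void)
{
    char *copy = av_strdup("/tmp/hls/seg_000.ts");      /* av_dirname() may modify its argument */
    if (!copy)
        return 1;

    printf("%s\n", av_basename("/tmp/hls/seg_000.ts")); /* seg_000.ts            */
    printf("%s\n", av_dirname(copy));                   /* /tmp/hls              */
    printf("%s\n", av_basename(NULL));                  /* "."  (new behaviour)  */
    printf("%s\n", av_dirname(NULL));                   /* "."  (new behaviour)  */

    av_free(copy);
    return 0;
}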
View File

@ -312,47 +312,47 @@ FATE_AFILTER_SAMPLES-$(call FILTERDEMDECENCMUX, HDCD, FLAC, FLAC, PCM_S24LE, PCM
fate-filter-hdcd-mix: SRC = $(TARGET_SAMPLES)/filter/hdcd-mix.flac
fate-filter-hdcd-mix: CMD = md5 -i $(SRC) -af hdcd -f s24le
fate-filter-hdcd-mix: CMP = oneline
fate-filter-hdcd-mix: REF = e7079913e90c124460cdbc712df5b84c
fate-filter-hdcd-mix: REF = 77443573e0bd3532de52a8bc0e825da7
# output will be different because of the gain mismatch in the second and third parts
FATE_AFILTER_SAMPLES-$(call FILTERDEMDECENCMUX, HDCD, FLAC, FLAC, PCM_S24LE, PCM_S24LE) += fate-filter-hdcd-mix-psoff
fate-filter-hdcd-mix-psoff: SRC = $(TARGET_SAMPLES)/filter/hdcd-mix.flac
fate-filter-hdcd-mix-psoff: CMD = md5 -i $(SRC) -af hdcd=process_stereo=false -f s24le
fate-filter-hdcd-mix-psoff: CMP = oneline
fate-filter-hdcd-mix-psoff: REF = bd0e81fe17696b825ee3515ab928e6bb
fate-filter-hdcd-mix-psoff: REF = 89e57885917a436b30855db4d478cefb
# test the different analyze modes
FATE_AFILTER_SAMPLES-$(call FILTERDEMDECENCMUX, HDCD, FLAC, FLAC, PCM_S24LE, PCM_S24LE) += fate-filter-hdcd-analyze-pe
fate-filter-hdcd-analyze-pe: SRC = $(TARGET_SAMPLES)/filter/hdcd-mix.flac
fate-filter-hdcd-analyze-pe: CMD = md5 -i $(SRC) -af hdcd=analyze_mode=pe -f s24le
fate-filter-hdcd-analyze-pe: CMP = oneline
fate-filter-hdcd-analyze-pe: REF = bb83e97bbd0064b9b1c0ef2f2c8f0c77
fate-filter-hdcd-analyze-pe: REF = 2d839d8a1cf73b10a566ce3d4cfaa79e
FATE_AFILTER_SAMPLES-$(call FILTERDEMDECENCMUX, HDCD, FLAC, FLAC, PCM_S24LE, PCM_S24LE) += fate-filter-hdcd-analyze-lle
fate-filter-hdcd-analyze-lle: SRC = $(TARGET_SAMPLES)/filter/hdcd-mix.flac
fate-filter-hdcd-analyze-lle: CMD = md5 -i $(SRC) -af hdcd=analyze_mode=lle -f s24le
fate-filter-hdcd-analyze-lle: CMP = oneline
fate-filter-hdcd-analyze-lle: REF = 121cc4a681aa0caef5c664fece7a3ddc
fate-filter-hdcd-analyze-lle: REF = b4b185332b7025c191062f49a2c015f1
FATE_AFILTER_SAMPLES-$(call FILTERDEMDECENCMUX, HDCD, FLAC, FLAC, PCM_S24LE, PCM_S24LE) += fate-filter-hdcd-analyze-cdt
fate-filter-hdcd-analyze-cdt: SRC = $(TARGET_SAMPLES)/filter/hdcd-mix.flac
fate-filter-hdcd-analyze-cdt: CMD = md5 -i $(SRC) -af hdcd=analyze_mode=cdt -f s24le
fate-filter-hdcd-analyze-cdt: CMP = oneline
fate-filter-hdcd-analyze-cdt: REF = 12136e6a00dd532994f6edcc347af1d4
fate-filter-hdcd-analyze-cdt: REF = afa6577675c63e87da3edbd442b7b6e2
FATE_AFILTER_SAMPLES-$(call FILTERDEMDECENCMUX, HDCD, FLAC, FLAC, PCM_S24LE, PCM_S24LE) += fate-filter-hdcd-analyze-tgm
fate-filter-hdcd-analyze-tgm: SRC = $(TARGET_SAMPLES)/filter/hdcd-mix.flac
fate-filter-hdcd-analyze-tgm: CMD = md5 -i $(SRC) -af hdcd=analyze_mode=tgm -f s24le
fate-filter-hdcd-analyze-tgm: CMP = oneline
fate-filter-hdcd-analyze-tgm: REF = a3c39f62e9b9b42c9c440d0045d5fb2f
fate-filter-hdcd-analyze-tgm: REF = 285f0fd2249b4903cd5e1ad5ce004219
# the two additional analyze modes from libhdcd
FATE_AFILTER_SAMPLES-$(call FILTERDEMDECENCMUX, HDCD, FLAC, FLAC, PCM_S24LE, PCM_S24LE) += fate-filter-hdcd-analyze-ltgm
fate-filter-hdcd-analyze-ltgm: SRC = $(TARGET_SAMPLES)/filter/hdcd-mix.flac
fate-filter-hdcd-analyze-ltgm: CMD = md5 -i $(SRC) -af hdcd=analyze_mode=lle:process_stereo=false -f s24le
fate-filter-hdcd-analyze-ltgm: CMP = oneline
fate-filter-hdcd-analyze-ltgm: REF = 76ffd86b762b5a93332039f27e4c0c0e
fate-filter-hdcd-analyze-ltgm: REF = 404dc2301ea97e9f96c3d6d2ebcfeaa5
FATE_AFILTER_SAMPLES-$(call FILTERDEMDECENCMUX, HDCD, FLAC, FLAC, PCM_S24LE, PCM_S24LE) += fate-filter-hdcd-analyze-pel
fate-filter-hdcd-analyze-pel: SRC = $(TARGET_SAMPLES)/filter/hdcd-mix.flac
fate-filter-hdcd-analyze-pel: CMD = md5 -i $(SRC) -af hdcd=analyze_mode=pe:force_pe=true -f s24le
fate-filter-hdcd-analyze-pel: CMP = oneline
fate-filter-hdcd-analyze-pel: REF = 8156c5a3658d789ab46447d62151f5e9
fate-filter-hdcd-analyze-pel: REF = 9342983208ec1a7f2b3e332ac4dcb723
FATE_AFILTER_SAMPLES-$(call FILTERDEMDECENCMUX, HDCD, FLAC, FLAC, PCM_S24LE, PCM_S24LE) += fate-filter-hdcd-false-positive
fate-filter-hdcd-false-positive: SRC = $(TARGET_SAMPLES)/filter/hdcd-false-positive.flac