Merge remote-tracking branch 'qatar/master'

* qatar/master:
  mpegvideo: reduce excessive inlining of mpeg_motion()
  mpegvideo: convert mpegvideo_common.h to a .c file
  build: factor out mpegvideo.o dependencies to CONFIG_MPEGVIDEO
  Move MASK_ABS macro to libavcodec/mathops.h
  x86: move MANGLE() and related macros to libavutil/x86/asm.h
  x86: rename libavutil/x86_cpu.h to libavutil/x86/asm.h
  aacdec: Don't fall back to the old output configuration when no old configuration is present.
  rtmp: Add message tracking
  rtsp: Support mpegts in raw udp packets
  rtsp: Support receiving plain data over UDP without any RTP encapsulation
  rtpdec: Remove an unused include
  rtpenc: Remove an av_abort() that depends on user-supplied data
  vsrc_movie: discourage its use with avconv.
  avconv: allow no input files.
  avconv: prevent invalid reads in transcode_init()
  avconv: rename OutputStream.is_past_recording_time to finished.

Conflicts:
	configure
	doc/filters.texi
	ffmpeg.c
	ffmpeg.h
	libavcodec/Makefile
	libavcodec/aacdec.c
	libavcodec/mpegvideo.c
	libavformat/version.h

Merged-by: Michael Niedermayer <michaelni@gmx.at>
Commit 9f088a1ed4 by Michael Niedermayer, 2012-08-09 19:09:39 +02:00
56 changed files with 533 additions and 445 deletions

configure

@ -1361,6 +1361,7 @@ CONFIG_EXTRA="
lgplv3
lpc
mpegaudiodsp
mpegvideo
nettle
rtpdec
sinewin
@ -1500,18 +1501,18 @@ atrac1_decoder_select="mdct sinewin"
atrac3_decoder_select="mdct"
binkaudio_dct_decoder_select="mdct rdft dct sinewin"
binkaudio_rdft_decoder_select="mdct rdft sinewin"
cavs_decoder_select="golomb"
cavs_decoder_select="golomb mpegvideo"
cook_decoder_select="mdct sinewin"
cscd_decoder_suggest="zlib"
dca_decoder_select="mdct"
dirac_decoder_select="dwt golomb"
dnxhd_encoder_select="aandcttables"
dnxhd_encoder_select="aandcttables mpegvideo"
dxa_decoder_select="zlib"
eac3_decoder_select="ac3_decoder"
eac3_encoder_select="mdct ac3dsp"
eamad_decoder_select="aandcttables"
eatgq_decoder_select="aandcttables"
eatqi_decoder_select="aandcttables"
eatqi_decoder_select="aandcttables mpegvideo"
exr_decoder_select="zlib"
ffv1_decoder_select="golomb"
flac_decoder_select="golomb"
@ -1523,14 +1524,15 @@ flashsv2_decoder_select="zlib"
flv_decoder_select="h263_decoder"
flv_encoder_select="h263_encoder"
fraps_decoder_select="huffman"
h261_encoder_select="aandcttables"
h263_decoder_select="h263_parser"
h263_encoder_select="aandcttables"
h261_decoder_select="mpegvideo"
h261_encoder_select="aandcttables mpegvideo"
h263_decoder_select="h263_parser mpegvideo"
h263_encoder_select="aandcttables mpegvideo"
h263_vaapi_hwaccel_select="vaapi h263_decoder"
h263i_decoder_select="h263_decoder"
h263p_encoder_select="h263_encoder"
h264_crystalhd_decoder_select="crystalhd h264_mp4toannexb_bsf h264_parser"
h264_decoder_select="golomb h264chroma h264dsp h264pred h264qpel"
h264_decoder_select="golomb h264chroma h264dsp h264pred h264qpel mpegvideo"
h264_dxva2_hwaccel_deps="dxva2api_h"
h264_dxva2_hwaccel_select="dxva2 h264_decoder"
h264_vaapi_hwaccel_select="vaapi h264_decoder"
@ -1541,9 +1543,10 @@ iac_decoder_select="fft mdct sinewin"
imc_decoder_select="fft mdct sinewin"
jpegls_decoder_select="golomb"
jpegls_encoder_select="golomb"
ljpeg_encoder_select="aandcttables"
ljpeg_encoder_select="aandcttables mpegvideo"
loco_decoder_select="golomb"
mjpeg_encoder_select="aandcttables"
mdec_decoder_select="mpegvideo"
mjpeg_encoder_select="aandcttables mpegvideo"
mlp_decoder_select="mlp_parser"
mp1_decoder_select="mpegaudiodsp"
mp1float_decoder_select="mpegaudiodsp"
@ -1562,13 +1565,15 @@ mpeg_xvmc_decoder_deps="X11_extensions_XvMClib_h"
mpeg_xvmc_decoder_select="mpegvideo_decoder"
mpeg1_vdpau_decoder_select="vdpau mpeg1video_decoder"
mpeg1_vdpau_hwaccel_select="vdpau mpeg1video_decoder"
mpeg1video_encoder_select="aandcttables"
mpeg1video_decoder_select="mpegvideo"
mpeg1video_encoder_select="aandcttables mpegvideo"
mpeg2_crystalhd_decoder_select="crystalhd"
mpeg2_dxva2_hwaccel_deps="dxva2api_h"
mpeg2_dxva2_hwaccel_select="dxva2 mpeg2video_decoder"
mpeg2_vdpau_hwaccel_select="vdpau mpeg2video_decoder"
mpeg2_vaapi_hwaccel_select="vaapi mpeg2video_decoder"
mpeg2video_encoder_select="aandcttables"
mpeg2video_encoder_select="mpegvideo"
mpeg2video_encoder_select="aandcttables mpegvideo"
mpeg4_crystalhd_decoder_select="crystalhd"
mpeg4_decoder_select="h263_decoder mpeg4video_parser"
mpeg4_encoder_select="h263_encoder"
@ -1593,17 +1598,18 @@ rv10_decoder_select="h263_decoder"
rv10_encoder_select="h263_encoder"
rv20_decoder_select="h263_decoder"
rv20_encoder_select="h263_encoder"
rv30_decoder_select="golomb h264chroma h264pred h264qpel"
rv40_decoder_select="golomb h264chroma h264pred h264qpel"
rv30_decoder_select="golomb h264chroma h264pred h264qpel mpegvideo"
rv40_decoder_select="golomb h264chroma h264pred h264qpel mpegvideo"
shorten_decoder_select="golomb"
sipr_decoder_select="lsp"
snow_decoder_select="dwt"
snow_encoder_select="aandcttables dwt"
snow_encoder_select="aandcttables dwt mpegvideo"
sonic_decoder_select="golomb"
sonic_encoder_select="golomb"
sonic_ls_encoder_select="golomb"
svq1_encoder_select="aandcttables"
svq3_decoder_select="golomb h264chroma h264dsp h264pred h264qpel"
svq1_encoder_select="mpegvideo"
svq1_encoder_select="aandcttables mpegvideo"
svq3_decoder_select="golomb h264chroma h264dsp h264pred h264qpel mpegvideo"
svq3_decoder_suggest="zlib"
theora_decoder_select="vp3_decoder"
tiff_decoder_suggest="zlib"
@ -1653,7 +1659,10 @@ vda_deps="VideoDecodeAcceleration_VDADecoder_h pthreads"
vdpau_deps="vdpau_vdpau_h vdpau_vdpau_x11_h"
# parsers
h264_parser_select="golomb h264dsp h264pred"
h264_parser_select="golomb h264dsp h264pred mpegvideo"
mpeg4video_parser_select="mpegvideo"
mpegvideo_parser_select="mpegvideo"
vc1_parser_select="mpegvideo"
# external libraries
libaacplus_encoder_deps="libaacplus"
@ -1709,12 +1718,13 @@ matroska_demuxer_suggest="zlib bzlib"
mov_demuxer_suggest="zlib"
mp3_demuxer_select="mpegaudio_parser"
mp4_muxer_select="mov_muxer"
mpegts_muxer_select="adts_muxer latm_muxer"
mpegts_muxer_select="adts_muxer latm_muxer mpegvideo"
mpegtsraw_demuxer_select="mpegts_demuxer"
mxf_d10_muxer_select="mxf_muxer"
ogg_demuxer_select="golomb"
psp_muxer_select="mov_muxer"
rtp_demuxer_select="sdp_demuxer"
rtp_muxer_select="mpegvideo"
rtpdec_select="asf_demuxer rm_demuxer rtp_protocol mpegts_demuxer mov_demuxer"
rtsp_demuxer_select="http_protocol rtpdec"
rtsp_muxer_select="rtp_muxer http_protocol rtp_protocol"


@ -965,6 +965,9 @@ the matching type.
Output link labels are referred to with @option{-map}. Unlabeled outputs are
added to the first output file.
Note that with this option it is possible to use only lavfi sources without
normal input files.
For example, to overlay an image over video
@example
ffmpeg -i video.mkv -i image.png -filter_complex '[0:v][1:v]overlay[out]' -map
@ -987,6 +990,11 @@ graph will be added to the output file automatically, so we can simply write
@example
ffmpeg -i video.mkv -i image.png -filter_complex 'overlay' out.mkv
@end example
To generate 5 seconds of pure red video using lavfi @code{color} source:
@example
ffmpeg -filter_complex 'color=red' -t 5 out.mkv
@end example
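A labeled output from such a graph can likewise be selected explicitly with
@option{-map} (an illustrative variant, not part of the original patch):
@example
ffmpeg -filter_complex 'color=red [out]' -map '[out]' -t 5 out.mkv
@end example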
@end table
As a special exception, you can use a bitmap subtitle stream as input: it


@ -565,7 +565,7 @@ static int check_recording_time(OutputStream *ost)
if (of->recording_time != INT64_MAX &&
av_compare_ts(ost->sync_opts - ost->first_pts, ost->st->codec->time_base, of->recording_time,
AV_TIME_BASE_Q) >= 0) {
ost->is_past_recording_time = 1;
ost->finished = 1;
return 0;
}
return 1;
@ -1315,7 +1315,7 @@ static void do_streamcopy(InputStream *ist, OutputStream *ost, const AVPacket *p
if (of->recording_time != INT64_MAX &&
ist->pts >= of->recording_time + of->start_time) {
ost->is_past_recording_time = 1;
ost->finished = 1;
return;
}
@ -1911,7 +1911,7 @@ static int transcode_init(void)
{
int ret = 0, i, j, k;
AVFormatContext *oc;
AVCodecContext *codec, *icodec = NULL;
AVCodecContext *codec;
OutputStream *ost;
InputStream *ist;
char error[1024];
@ -1942,6 +1942,7 @@ static int transcode_init(void)
/* for each output stream, we compute the right encoding parameters */
for (i = 0; i < nb_output_streams; i++) {
AVCodecContext *icodec = NULL;
ost = output_streams[i];
oc = output_files[ost->file_index]->ctx;
ist = get_input_stream(ost);
@ -2367,13 +2368,13 @@ static int need_output(void)
OutputFile *of = output_files[ost->file_index];
AVFormatContext *os = output_files[ost->file_index]->ctx;
if (ost->is_past_recording_time ||
if (ost->finished ||
(os->pb && avio_tell(os->pb) >= of->limit_filesize))
continue;
if (ost->frame_number >= ost->max_frames) {
int j;
for (j = 0; j < of->ctx->nb_streams; j++)
output_streams[of->ost_index + j]->is_past_recording_time = 1;
output_streams[of->ost_index + j]->finished = 1;
continue;
}
@ -2424,7 +2425,7 @@ static int select_input_file(void)
for (i = 0; i < nb_output_streams; i++)
nb_active_out -= output_streams[i]->unavailable =
output_streams[i]->is_past_recording_time;
output_streams[i]->finished;
while (nb_active_out) {
opts_min = INT64_MAX;
ost_index = -1;
@ -3078,10 +3079,10 @@ int main(int argc, char **argv)
exit_program(1);
}
if (nb_input_files == 0) {
av_log(NULL, AV_LOG_FATAL, "At least one input file must be specified\n");
exit_program(1);
}
// if (nb_input_files == 0) {
// av_log(NULL, AV_LOG_FATAL, "At least one input file must be specified\n");
// exit_program(1);
// }
current_time = ti = getutime();
if (transcode() < 0)


@ -306,7 +306,7 @@ typedef struct OutputStream {
int64_t swr_dither_method;
double swr_dither_scale;
AVDictionary *opts;
int is_past_recording_time;
int finished; /* no more packets should be written for this stream */
int unavailable; /* true if the stream is unavailable (possibly temporarily) */
int stream_copy;
const char *attachment_filename;


@ -54,6 +54,7 @@ OBJS-$(CONFIG_MDCT) += mdct_fixed.o mdct_float.o
OBJS-$(CONFIG_MPEGAUDIODSP) += mpegaudiodsp.o \
mpegaudiodsp_fixed.o \
mpegaudiodsp_float.o
OBJS-$(CONFIG_MPEGVIDEO) += mpegvideo.o mpegvideo_motion.o
RDFT-OBJS-$(CONFIG_HARDCODED_TABLES) += sin_tables.o
OBJS-$(CONFIG_RDFT) += rdft.o $(RDFT-OBJS-yes)
OBJS-$(CONFIG_SINEWIN) += sinewin.o
@ -126,7 +127,7 @@ OBJS-$(CONFIG_BMV_VIDEO_DECODER) += bmv.o
OBJS-$(CONFIG_BMV_AUDIO_DECODER) += bmv.o
OBJS-$(CONFIG_C93_DECODER) += c93.o
OBJS-$(CONFIG_CAVS_DECODER) += cavs.o cavsdec.o cavsdsp.o \
mpeg12data.o mpegvideo.o
mpeg12data.o
OBJS-$(CONFIG_CDGRAPHICS_DECODER) += cdgraphics.o
OBJS-$(CONFIG_CDXL_DECODER) += cdxl.o
OBJS-$(CONFIG_CINEPAK_DECODER) += cinepak.o
@ -145,8 +146,7 @@ OBJS-$(CONFIG_DFA_DECODER) += dfa.o
OBJS-$(CONFIG_DNXHD_DECODER) += dnxhddec.o dnxhddata.o
OBJS-$(CONFIG_DNXHD_ENCODER) += dnxhdenc.o dnxhddata.o \
mpegvideo_enc.o motion_est.o \
ratecontrol.o mpeg12data.o \
mpegvideo.o
ratecontrol.o mpeg12data.o
OBJS-$(CONFIG_DPX_DECODER) += dpx.o
OBJS-$(CONFIG_DPX_ENCODER) += dpxenc.o
OBJS-$(CONFIG_DSICINAUDIO_DECODER) += dsicinav.o
@ -164,13 +164,11 @@ OBJS-$(CONFIG_EAC3_ENCODER) += eac3enc.o ac3enc.o ac3enc_float.o \
ac3tab.o ac3.o kbdwin.o eac3_data.o
OBJS-$(CONFIG_EACMV_DECODER) += eacmv.o
OBJS-$(CONFIG_EAMAD_DECODER) += eamad.o eaidct.o mpeg12.o \
mpeg12data.o mpegvideo.o \
error_resilience.o
mpeg12data.o error_resilience.o
OBJS-$(CONFIG_EATGQ_DECODER) += eatgq.o eaidct.o
OBJS-$(CONFIG_EATGV_DECODER) += eatgv.o
OBJS-$(CONFIG_EATQI_DECODER) += eatqi.o eaidct.o mpeg12.o \
mpeg12data.o mpegvideo.o \
error_resilience.o
mpeg12data.o error_resilience.o
OBJS-$(CONFIG_EIGHTBPS_DECODER) += 8bps.o
OBJS-$(CONFIG_EIGHTSVX_EXP_DECODER) += 8svx.o
OBJS-$(CONFIG_EIGHTSVX_FIB_DECODER) += 8svx.o
@ -200,27 +198,24 @@ OBJS-$(CONFIG_GIF_DECODER) += gifdec.o lzw.o
OBJS-$(CONFIG_GIF_ENCODER) += gif.o lzwenc.o
OBJS-$(CONFIG_GSM_DECODER) += gsmdec.o gsmdec_data.o msgsmdec.o
OBJS-$(CONFIG_GSM_MS_DECODER) += gsmdec.o gsmdec_data.o msgsmdec.o
OBJS-$(CONFIG_H261_DECODER) += h261dec.o h261.o h261data.o \
mpegvideo.o error_resilience.o
OBJS-$(CONFIG_H261_DECODER) += h261dec.o h261.o h261data.o error_resilience.o
OBJS-$(CONFIG_H261_ENCODER) += h261enc.o h261.o h261data.o \
mpegvideo_enc.o motion_est.o \
ratecontrol.o mpeg12data.o \
mpegvideo.o
ratecontrol.o mpeg12data.o
OBJS-$(CONFIG_H263_DECODER) += h263dec.o h263.o ituh263dec.o \
mpeg4video.o mpeg4videodec.o flvdec.o\
intelh263dec.o mpegvideo.o \
error_resilience.o
intelh263dec.o error_resilience.o
OBJS-$(CONFIG_H263_VAAPI_HWACCEL) += vaapi_mpeg4.o
OBJS-$(CONFIG_H263_ENCODER) += mpegvideo_enc.o mpeg4video.o \
mpeg4videoenc.o motion_est.o \
ratecontrol.o h263.o ituh263enc.o \
flvenc.o mpeg12data.o \
mpegvideo.o error_resilience.o
error_resilience.o
OBJS-$(CONFIG_H264_DECODER) += h264.o \
h264_loopfilter.o h264_direct.o \
cabac.o h264_sei.o h264_ps.o \
h264_refs.o h264_cavlc.o h264_cabac.o\
mpegvideo.o error_resilience.o
error_resilience.o
OBJS-$(CONFIG_H264_DXVA2_HWACCEL) += dxva2_h264.o
OBJS-$(CONFIG_H264_VAAPI_HWACCEL) += vaapi_h264.o
OBJS-$(CONFIG_H264_VDA_HWACCEL) += vda_h264.o
@ -250,20 +245,18 @@ OBJS-$(CONFIG_KMVC_DECODER) += kmvc.o
OBJS-$(CONFIG_LAGARITH_DECODER) += lagarith.o lagarithrac.o
OBJS-$(CONFIG_LJPEG_ENCODER) += ljpegenc.o mjpegenc.o mjpeg.o \
mpegvideo_enc.o motion_est.o \
ratecontrol.o mpeg12data.o \
mpegvideo.o
ratecontrol.o mpeg12data.o
OBJS-$(CONFIG_LOCO_DECODER) += loco.o
OBJS-$(CONFIG_MACE3_DECODER) += mace.o
OBJS-$(CONFIG_MACE6_DECODER) += mace.o
OBJS-$(CONFIG_MDEC_DECODER) += mdec.o mpeg12.o mpeg12data.o \
mpegvideo.o error_resilience.o
error_resilience.o
OBJS-$(CONFIG_MICRODVD_DECODER) += microdvddec.o ass.o
OBJS-$(CONFIG_MIMIC_DECODER) += mimic.o
OBJS-$(CONFIG_MJPEG_DECODER) += mjpegdec.o mjpeg.o
OBJS-$(CONFIG_MJPEG_ENCODER) += mjpegenc.o mjpeg.o \
mpegvideo_enc.o motion_est.o \
ratecontrol.o mpeg12data.o \
mpegvideo.o
ratecontrol.o mpeg12data.o
OBJS-$(CONFIG_MJPEGB_DECODER) += mjpegbdec.o mjpegdec.o mjpeg.o
OBJS-$(CONFIG_MLP_DECODER) += mlpdec.o mlpdsp.o
OBJS-$(CONFIG_MMVIDEO_DECODER) += mmvideo.o
@ -304,21 +297,21 @@ OBJS-$(CONFIG_MPEGVIDEO_DECODER) += mpeg12.o mpeg12data.o \
mpegvideo.o error_resilience.o
OBJS-$(CONFIG_MPEG_XVMC_DECODER) += mpegvideo_xvmc.o
OBJS-$(CONFIG_MPEG1VIDEO_DECODER) += mpeg12.o mpeg12data.o \
mpegvideo.o error_resilience.o
error_resilience.o
OBJS-$(CONFIG_MPEG1VIDEO_ENCODER) += mpeg12enc.o mpegvideo_enc.o \
timecode.o \
motion_est.o ratecontrol.o \
mpeg12.o mpeg12data.o \
mpegvideo.o error_resilience.o
error_resilience.o
OBJS-$(CONFIG_MPEG2_DXVA2_HWACCEL) += dxva2_mpeg2.o
OBJS-$(CONFIG_MPEG2_VAAPI_HWACCEL) += vaapi_mpeg2.o
OBJS-$(CONFIG_MPEG2VIDEO_DECODER) += mpeg12.o mpeg12data.o \
mpegvideo.o error_resilience.o
error_resilience.o
OBJS-$(CONFIG_MPEG2VIDEO_ENCODER) += mpeg12enc.o mpegvideo_enc.o \
timecode.o \
motion_est.o ratecontrol.o \
mpeg12.o mpeg12data.o \
mpegvideo.o error_resilience.o
error_resilience.o
OBJS-$(CONFIG_MPEG4_VAAPI_HWACCEL) += vaapi_mpeg4.o
OBJS-$(CONFIG_MSMPEG4V1_DECODER) += msmpeg4.o msmpeg4data.o
OBJS-$(CONFIG_MSMPEG4V2_DECODER) += msmpeg4.o msmpeg4data.o h263dec.o \
@ -400,9 +393,9 @@ OBJS-$(CONFIG_RV10_ENCODER) += rv10enc.o
OBJS-$(CONFIG_RV20_DECODER) += rv10.o
OBJS-$(CONFIG_RV20_ENCODER) += rv20enc.o
OBJS-$(CONFIG_RV30_DECODER) += rv30.o rv34.o rv30dsp.o rv34dsp.o \
mpegvideo.o error_resilience.o
error_resilience.o
OBJS-$(CONFIG_RV40_DECODER) += rv40.o rv34.o rv34dsp.o rv40dsp.o \
mpegvideo.o error_resilience.o
error_resilience.o
OBJS-$(CONFIG_SAMI_DECODER) += samidec.o ass.o
OBJS-$(CONFIG_S302M_DECODER) += s302m.o
OBJS-$(CONFIG_SANM_DECODER) += sanm.o
@ -419,7 +412,7 @@ OBJS-$(CONFIG_SMC_DECODER) += smc.o
OBJS-$(CONFIG_SNOW_DECODER) += snowdec.o snow.o rangecoder.o
OBJS-$(CONFIG_SNOW_ENCODER) += snowenc.o snow.o rangecoder.o \
motion_est.o ratecontrol.o \
h263.o mpegvideo.o \
h263.o \
error_resilience.o ituh263enc.o \
mpegvideo_enc.o mpeg12data.o
OBJS-$(CONFIG_SOL_DPCM_DECODER) += dpcm.o
@ -433,17 +426,17 @@ OBJS-$(CONFIG_SUBVIEWER_DECODER) += subviewerdec.o ass.o
OBJS-$(CONFIG_SUNRAST_DECODER) += sunrast.o
OBJS-$(CONFIG_SUNRAST_ENCODER) += sunrastenc.o
OBJS-$(CONFIG_SVQ1_DECODER) += svq1dec.o svq1.o h263.o \
mpegvideo.o error_resilience.o
error_resilience.o
OBJS-$(CONFIG_SVQ1_ENCODER) += svq1enc.o svq1.o \
motion_est.o h263.o \
mpegvideo.o error_resilience.o \
error_resilience.o \
ituh263enc.o mpegvideo_enc.o \
ratecontrol.o mpeg12data.o
OBJS-$(CONFIG_SVQ3_DECODER) += h264.o svq3.o \
h264_loopfilter.o h264_direct.o \
h264_sei.o h264_ps.o h264_refs.o \
h264_cavlc.o h264_cabac.o cabac.o \
mpegvideo.o error_resilience.o \
error_resilience.o \
svq1dec.o svq1.o h263.o
OBJS-$(CONFIG_TARGA_DECODER) += targa.o
OBJS-$(CONFIG_TARGA_ENCODER) += targaenc.o rle.o
@ -652,7 +645,7 @@ OBJS-$(CONFIG_MP2_MUXER) += mpegaudiodata.o mpegaudiodecheader.o
OBJS-$(CONFIG_MP3_MUXER) += mpegaudiodata.o mpegaudiodecheader.o
OBJS-$(CONFIG_MOV_DEMUXER) += mpeg4audio.o mpegaudiodata.o ac3tab.o timecode.o
OBJS-$(CONFIG_MOV_MUXER) += mpeg4audio.o mpegaudiodata.o
OBJS-$(CONFIG_MPEGTS_MUXER) += mpegvideo.o mpeg4audio.o
OBJS-$(CONFIG_MPEGTS_MUXER) += mpeg4audio.o
OBJS-$(CONFIG_MPEGTS_DEMUXER) += mpeg4audio.o mpegaudiodata.o
OBJS-$(CONFIG_MXF_MUXER) += timecode.o
OBJS-$(CONFIG_NUT_MUXER) += mpegaudiodata.o
@ -661,7 +654,7 @@ OBJS-$(CONFIG_OGG_DEMUXER) += xiph.o flac.o flacdata.o \
dirac.o vorbis_data.o
OBJS-$(CONFIG_OGG_MUXER) += xiph.o flac.o flacdata.o \
vorbis_data.o
OBJS-$(CONFIG_RTP_MUXER) += mpeg4audio.o mpegvideo.o xiph.o
OBJS-$(CONFIG_RTP_MUXER) += mpeg4audio.o xiph.o
OBJS-$(CONFIG_SPDIF_DEMUXER) += aacadtsdec.o mpeg4audio.o
OBJS-$(CONFIG_SPDIF_MUXER) += dca.o
OBJS-$(CONFIG_WEBM_MUXER) += mpeg4audio.o mpegaudiodata.o \
@ -737,12 +730,12 @@ OBJS-$(CONFIG_H264_PARSER) += h264_parser.o h264.o \
h264_refs.o h264_sei.o h264_direct.o \
h264_loopfilter.o h264_cabac.o \
h264_cavlc.o h264_ps.o \
mpegvideo.o error_resilience.o
error_resilience.o
OBJS-$(CONFIG_AAC_LATM_PARSER) += latm_parser.o
OBJS-$(CONFIG_MJPEG_PARSER) += mjpeg_parser.o
OBJS-$(CONFIG_MLP_PARSER) += mlp_parser.o mlp.o
OBJS-$(CONFIG_MPEG4VIDEO_PARSER) += mpeg4video_parser.o h263.o \
mpegvideo.o error_resilience.o \
error_resilience.o \
mpeg4videodec.o mpeg4video.o \
ituh263dec.o h263dec.o
OBJS-$(CONFIG_PNG_PARSER) += png_parser.o
@ -750,13 +743,13 @@ OBJS-$(CONFIG_MPEGAUDIO_PARSER) += mpegaudio_parser.o \
mpegaudiodecheader.o mpegaudiodata.o
OBJS-$(CONFIG_MPEGVIDEO_PARSER) += mpegvideo_parser.o \
mpeg12.o mpeg12data.o \
mpegvideo.o error_resilience.o
error_resilience.o
OBJS-$(CONFIG_PNM_PARSER) += pnm_parser.o pnm.o
OBJS-$(CONFIG_RV30_PARSER) += rv34_parser.o
OBJS-$(CONFIG_RV40_PARSER) += rv34_parser.o
OBJS-$(CONFIG_VC1_PARSER) += vc1_parser.o vc1.o vc1data.o \
msmpeg4.o msmpeg4data.o mpeg4video.o \
h263.o mpegvideo.o error_resilience.o
h263.o error_resilience.o
OBJS-$(CONFIG_VORBIS_PARSER) += vorbis_parser.o xiph.o
OBJS-$(CONFIG_VP3_PARSER) += vp3_parser.o
OBJS-$(CONFIG_VP8_PARSER) += vp8_parser.o


@ -369,12 +369,10 @@ static void push_output_configuration(AACContext *ac) {
* configuration is unlocked.
*/
static void pop_output_configuration(AACContext *ac) {
if (ac->oc[1].status != OC_LOCKED) {
if (ac->oc[0].status == OC_LOCKED) {
ac->oc[1] = ac->oc[0];
ac->avctx->channels = ac->oc[1].channels;
ac->avctx->channel_layout = ac->oc[1].channel_layout;
}
if (ac->oc[1].status != OC_LOCKED && ac->oc[0].status != OC_NONE) {
ac->oc[1] = ac->oc[0];
ac->avctx->channels = ac->oc[1].channels;
ac->avctx->channel_layout = ac->oc[1].channel_layout;
}
}


@ -31,7 +31,6 @@
#include "dsputil.h"
#include "internal.h"
#include "mpegvideo.h"
#include "mpegvideo_common.h"
#include "dnxhdenc.h"
#include "internal.h"


@ -138,6 +138,13 @@ if ((y) < (x)) {\
}
#endif
#ifndef MASK_ABS
#define MASK_ABS(mask, level) do { \
mask = level >> 31; \
level = (level ^ mask) - mask; \
} while (0)
#endif
#ifndef NEG_SSR32
# define NEG_SSR32(a,s) ((( int32_t)(a))>>(32-(s)))
#endif
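As an aside, the portable MASK_ABS() fallback added above is a branchless
absolute value: the arithmetic right shift yields an all-ones mask for negative
inputs, and the xor/subtract pair then negates them. A minimal standalone
sketch of the same idea (the helper name is hypothetical; like the macro, it
assumes the compiler implements >> on negative ints as an arithmetic shift):

#include <assert.h>
#include <stdint.h>

/* Mirror of the portable MASK_ABS() fallback: returns |level| and stores the
 * sign mask (0 or -1) in *mask, without any branch. */
static inline int32_t mask_abs32(int32_t level, int32_t *mask)
{
    *mask = level >> 31;             /* 0 if level >= 0, -1 (all ones) if negative */
    return (level ^ *mask) - *mask;  /* identity, or two's-complement negation */
}

int main(void)
{
    int32_t m;
    assert(mask_abs32( 42, &m) == 42 && m ==  0);
    assert(mask_abs32(-42, &m) == 42 && m == -1);
    return 0;
}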


@ -895,7 +895,7 @@ static void mpeg1_encode_block(MpegEncContext *s,
run = i - last_non_zero - 1;
alevel= level;
MASK_ABS(sign, alevel)
MASK_ABS(sign, alevel);
sign&=1;
if (alevel <= mpeg1_max_level[0][run]){


@ -33,7 +33,6 @@
#include "dsputil.h"
#include "internal.h"
#include "mpegvideo.h"
#include "mpegvideo_common.h"
#include "mjpegenc.h"
#include "msmpeg4.h"
#include "xvmc_internal.h"
@ -2388,12 +2387,12 @@ void MPV_decode_mb_internal(MpegEncContext *s, DCTELEM block[12][64],
op_pix = s->dsp.put_no_rnd_pixels_tab;
}
if (s->mv_dir & MV_DIR_FORWARD) {
MPV_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_picture.f.data, op_pix, op_qpix);
ff_MPV_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_picture.f.data, op_pix, op_qpix);
op_pix = s->dsp.avg_pixels_tab;
op_qpix= s->me.qpel_avg;
}
if (s->mv_dir & MV_DIR_BACKWARD) {
MPV_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_picture.f.data, op_pix, op_qpix);
ff_MPV_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_picture.f.data, op_pix, op_qpix);
}
}
}


@ -748,6 +748,13 @@ static const AVClass name ## _class = {\
.version = LIBAVUTIL_VERSION_INT,\
};
/**
* Set the given MpegEncContext to common defaults (same for encoding
* and decoding). The changed fields will not depend upon the prior
* state of the MpegEncContext.
*/
void ff_MPV_common_defaults(MpegEncContext *s);
void ff_MPV_decode_defaults(MpegEncContext *s);
int ff_MPV_common_init(MpegEncContext *s);
void ff_MPV_common_end(MpegEncContext *s);
@ -786,10 +793,18 @@ void ff_er_add_slice(MpegEncContext *s, int startx, int starty, int endx, int en
int ff_dct_common_init(MpegEncContext *s);
void ff_convert_matrix(DSPContext *dsp, int (*qmat)[64], uint16_t (*qmat16)[2][64],
const uint16_t *quant_matrix, int bias, int qmin, int qmax, int intra);
int ff_dct_quantize_c(MpegEncContext *s, DCTELEM *block, int n, int qscale, int *overflow);
void ff_init_block_index(MpegEncContext *s);
void ff_copy_picture(Picture *dst, Picture *src);
void ff_MPV_motion(MpegEncContext *s,
uint8_t *dest_y, uint8_t *dest_cb,
uint8_t *dest_cr, int dir,
uint8_t **ref_picture,
op_pixels_func (*pix_op)[4],
qpel_mc_func (*qpix_op)[16]);
/**
* Allocate a Picture.
* The pixels are allocated/set by calling get_buffer() if shared = 0.


@ -33,7 +33,6 @@
#include "avcodec.h"
#include "dsputil.h"
#include "mpegvideo.h"
#include "mpegvideo_common.h"
#include "h263.h"
#include "mjpegenc.h"
#include "msmpeg4.h"
@ -1897,14 +1896,16 @@ static av_always_inline void encode_mb_internal(MpegEncContext *s,
}
if (s->mv_dir & MV_DIR_FORWARD) {
MPV_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_picture.f.data,
op_pix, op_qpix);
ff_MPV_motion(s, dest_y, dest_cb, dest_cr, 0,
s->last_picture.f.data,
op_pix, op_qpix);
op_pix = s->dsp.avg_pixels_tab;
op_qpix = s->dsp.avg_qpel_pixels_tab;
}
if (s->mv_dir & MV_DIR_BACKWARD) {
MPV_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_picture.f.data,
op_pix, op_qpix);
ff_MPV_motion(s, dest_y, dest_cb, dest_cr, 1,
s->next_picture.f.data,
op_pix, op_qpix);
}
if (s->flags & CODEC_FLAG_INTERLACED_DCT) {


@ -1,5 +1,4 @@
/*
* The simplest mpeg encoder (well, it was the simplest!)
* Copyright (c) 2000,2001 Fabrice Bellard
* Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
*
@ -22,14 +21,6 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* The simplest mpeg encoder (well, it was the simplest!).
*/
#ifndef AVCODEC_MPEGVIDEO_COMMON_H
#define AVCODEC_MPEGVIDEO_COMMON_H
#include <string.h>
#include "libavutil/avassert.h"
#include "avcodec.h"
@ -39,14 +30,6 @@
#include "msmpeg4.h"
#include <limits.h>
int ff_dct_quantize_c(MpegEncContext *s, DCTELEM *block, int n, int qscale, int *overflow);
/**
* Set the given MpegEncContext to common defaults (same for encoding and decoding).
* The changed fields will not depend upon the prior state of the MpegEncContext.
*/
void ff_MPV_common_defaults(MpegEncContext *s);
static inline void gmc1_motion(MpegEncContext *s,
uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr,
uint8_t **ref_picture)
@ -357,21 +340,39 @@ if(s->quarter_sample)
}
}
/* apply one mpeg motion vector to the three components */
static av_always_inline
void mpeg_motion(MpegEncContext *s,
uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr,
int field_based, int bottom_field, int field_select,
uint8_t **ref_picture, op_pixels_func (*pix_op)[4],
int motion_x, int motion_y, int h, int mb_y)
static void mpeg_motion(MpegEncContext *s,
uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr,
int field_select, uint8_t **ref_picture,
op_pixels_func (*pix_op)[4],
int motion_x, int motion_y, int h, int mb_y)
{
#if !CONFIG_SMALL
if(s->out_format == FMT_MPEG1)
mpeg_motion_internal(s, dest_y, dest_cb, dest_cr, field_based,
mpeg_motion_internal(s, dest_y, dest_cb, dest_cr, 0, 0,
field_select, ref_picture, pix_op,
motion_x, motion_y, h, 1, mb_y);
else
#endif
mpeg_motion_internal(s, dest_y, dest_cb, dest_cr, 0, 0,
field_select, ref_picture, pix_op,
motion_x, motion_y, h, 0, mb_y);
}
static void mpeg_motion_field(MpegEncContext *s, uint8_t *dest_y,
uint8_t *dest_cb, uint8_t *dest_cr,
int bottom_field, int field_select,
uint8_t **ref_picture,
op_pixels_func (*pix_op)[4],
int motion_x, int motion_y, int h, int mb_y)
{
#if !CONFIG_SMALL
if(s->out_format == FMT_MPEG1)
mpeg_motion_internal(s, dest_y, dest_cb, dest_cr, 1,
bottom_field, field_select, ref_picture, pix_op,
motion_x, motion_y, h, 1, mb_y);
else
#endif
mpeg_motion_internal(s, dest_y, dest_cb, dest_cr, field_based,
mpeg_motion_internal(s, dest_y, dest_cb, dest_cr, 1,
bottom_field, field_select, ref_picture, pix_op,
motion_x, motion_y, h, 0, mb_y);
}
@ -726,8 +727,7 @@ static av_always_inline void MPV_motion_internal(MpegEncContext *s,
s->mv[dir][0][0], s->mv[dir][0][1], 16);
}else
{
mpeg_motion(s, dest_y, dest_cb, dest_cr,
0, 0, 0,
mpeg_motion(s, dest_y, dest_cb, dest_cr, 0,
ref_picture, pix_op,
s->mv[dir][0][0], s->mv[dir][0][1], 16, mb_y);
}
@ -800,15 +800,15 @@ static av_always_inline void MPV_motion_internal(MpegEncContext *s,
}
}else{
/* top field */
mpeg_motion(s, dest_y, dest_cb, dest_cr,
1, 0, s->field_select[dir][0],
ref_picture, pix_op,
s->mv[dir][0][0], s->mv[dir][0][1], 8, mb_y);
mpeg_motion_field(s, dest_y, dest_cb, dest_cr,
0, s->field_select[dir][0],
ref_picture, pix_op,
s->mv[dir][0][0], s->mv[dir][0][1], 8, mb_y);
/* bottom field */
mpeg_motion(s, dest_y, dest_cb, dest_cr,
1, 1, s->field_select[dir][1],
ref_picture, pix_op,
s->mv[dir][1][0], s->mv[dir][1][1], 8, mb_y);
mpeg_motion_field(s, dest_y, dest_cb, dest_cr,
1, s->field_select[dir][1],
ref_picture, pix_op,
s->mv[dir][1][0], s->mv[dir][1][1], 8, mb_y);
}
} else {
if(s->picture_structure != s->field_select[dir][0] + 1 && s->pict_type != AV_PICTURE_TYPE_B && !s->first_field){
@ -816,7 +816,7 @@ static av_always_inline void MPV_motion_internal(MpegEncContext *s,
}
mpeg_motion(s, dest_y, dest_cb, dest_cr,
0, 0, s->field_select[dir][0],
s->field_select[dir][0],
ref_picture, pix_op,
s->mv[dir][0][0], s->mv[dir][0][1], 16, mb_y>>1);
}
@ -833,7 +833,7 @@ static av_always_inline void MPV_motion_internal(MpegEncContext *s,
}
mpeg_motion(s, dest_y, dest_cb, dest_cr,
0, 0, s->field_select[dir][i],
s->field_select[dir][i],
ref2picture, pix_op,
s->mv[dir][i][0], s->mv[dir][i][1] + 16*i, 8, mb_y>>1);
@ -847,17 +847,17 @@ static av_always_inline void MPV_motion_internal(MpegEncContext *s,
for(i=0; i<2; i++){
int j;
for(j=0; j<2; j++){
mpeg_motion(s, dest_y, dest_cb, dest_cr,
1, j, j^i,
ref_picture, pix_op,
s->mv[dir][2*i + j][0], s->mv[dir][2*i + j][1], 8, mb_y);
mpeg_motion_field(s, dest_y, dest_cb, dest_cr,
j, j^i, ref_picture, pix_op,
s->mv[dir][2*i + j][0],
s->mv[dir][2*i + j][1], 8, mb_y);
}
pix_op = s->dsp.avg_pixels_tab;
}
}else{
for(i=0; i<2; i++){
mpeg_motion(s, dest_y, dest_cb, dest_cr,
0, 0, s->picture_structure != i+1,
s->picture_structure != i+1,
ref_picture, pix_op,
s->mv[dir][2*i][0],s->mv[dir][2*i][1],16, mb_y>>1);
@ -875,12 +875,12 @@ static av_always_inline void MPV_motion_internal(MpegEncContext *s,
}
}
static inline void MPV_motion(MpegEncContext *s,
uint8_t *dest_y, uint8_t *dest_cb,
uint8_t *dest_cr, int dir,
uint8_t **ref_picture,
op_pixels_func (*pix_op)[4],
qpel_mc_func (*qpix_op)[16])
void ff_MPV_motion(MpegEncContext *s,
uint8_t *dest_y, uint8_t *dest_cb,
uint8_t *dest_cr, int dir,
uint8_t **ref_picture,
op_pixels_func (*pix_op)[4],
qpel_mc_func (*qpix_op)[16])
{
#if !CONFIG_SMALL
if(s->out_format == FMT_MPEG1)
@ -891,4 +891,3 @@ static inline void MPV_motion(MpegEncContext *s,
MPV_motion_internal(s, dest_y, dest_cb, dest_cr, dir,
ref_picture, pix_op, qpix_op, 0);
}
#endif /* AVCODEC_MPEGVIDEO_COMMON_H */


@ -31,7 +31,7 @@
#include "dsputil.h"
#include "mpegvideo.h"
#include "msmpeg4.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "h263.h"
#include "mpeg4video.h"
#include "msmpeg4data.h"


@ -19,7 +19,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "dsputil_mmx.h"
#include "libavcodec/ac3dsp.h"


@ -23,7 +23,7 @@
#include "libavcodec/cabac.h"
#include "libavutil/attributes.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavutil/internal.h"
#include "config.h"


@ -24,7 +24,7 @@
#include "libavutil/common.h"
#include "libavutil/cpu.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/dsputil.h"
#include "libavcodec/cavsdsp.h"
#include "dsputil_mmx.h"


@ -21,7 +21,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/dnxhdenc.h"
#if HAVE_INLINE_ASM


@ -23,7 +23,7 @@
*/
#include "libavutil/cpu.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/dsputil.h"
#include "libavcodec/h264dsp.h"
#include "libavcodec/mpegvideo.h"


@ -24,7 +24,7 @@
#include <stdint.h>
#include "libavcodec/dsputil.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
typedef struct { uint64_t a, b; } xmm_reg;


@ -23,7 +23,7 @@
*/
#include "libavutil/cpu.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/dsputil.h"
#include "libavcodec/mpegvideo.h"
#include "libavcodec/mathops.h"


@ -20,7 +20,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "dsputil_mmx.h"
#include "dwt.h"


@ -31,7 +31,7 @@
*/
#include "libavutil/common.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/dsputil.h"
#if HAVE_INLINE_ASM


@ -23,7 +23,7 @@
*/
#include "libavutil/cpu.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/fmtconvert.h"
#include "libavcodec/dsputil.h"


@ -19,7 +19,7 @@
*/
#include "libavutil/cpu.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/h264dsp.h"
#include "dsputil_mmx.h"


@ -22,7 +22,7 @@
#include "libavutil/common.h"
#include "libavcodec/dsputil.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "dsputil_mmx.h"
#if HAVE_INLINE_ASM


@ -39,7 +39,7 @@
*/
#include "libavcodec/dsputil.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "idct_xvid.h"
#include "dsputil_mmx.h"


@ -19,7 +19,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavutil/cpu.h"
#include "libavcodec/lpc.h"


@ -101,6 +101,12 @@ __asm__ volatile(\
);
#endif
#define MASK_ABS(mask, level) \
__asm__ ("cltd \n\t" \
"xorl %1, %0 \n\t" \
"subl %1, %0 \n\t" \
: "+a"(level), "=&d"(mask))
// avoid +32 for shift optimization (gcc should do that ...)
#define NEG_SSR32 NEG_SSR32
static inline int32_t NEG_SSR32( int32_t a, int8_t s){


@ -19,7 +19,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/dsputil.h"
#include "libavcodec/mlp.h"


@ -22,7 +22,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/dsputil.h"
#include "dsputil_mmx.h"


@ -20,7 +20,7 @@
*/
#include "libavutil/cpu.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/dsputil.h"
#include "libavcodec/mpegaudiodsp.h"


@ -23,7 +23,7 @@
*/
#include "libavutil/cpu.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/avcodec.h"
#include "libavcodec/dsputil.h"
#include "libavcodec/mpegvideo.h"


@ -20,7 +20,7 @@
*/
#include "libavutil/cpu.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/dsputil.h"
#include "libavcodec/rv34dsp.h"


@ -20,7 +20,7 @@
*/
#include "libavutil/cpu.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/avcodec.h"
#include "libavcodec/snow.h"
#include "libavcodec/dwt.h"


@ -25,7 +25,7 @@
*/
#include "libavutil/cpu.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/dsputil.h"
#include "dsputil_mmx.h"
#include "libavcodec/vc1dsp.h"


@ -21,7 +21,7 @@
*/
#include "libavutil/cpu.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/dsputil.h"
#include "libavcodec/vp56dsp.h"


@ -21,7 +21,7 @@
*/
#include "libavutil/cpu.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/vp8dsp.h"
#if HAVE_YASM


@ -20,7 +20,7 @@
#include "libavutil/attributes.h"
#include "libavutil/cpu.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavfilter/gradfun.h"
#if HAVE_INLINE_ASM


@ -20,7 +20,7 @@
#include "libavutil/attributes.h"
#include "libavutil/cpu.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavcodec/x86/dsputil_mmx.h"
#include "libavfilter/yadif.h"


@ -52,15 +52,17 @@
typedef enum {
STATE_START, ///< client has not done anything yet
STATE_HANDSHAKED, ///< client has performed handshake
STATE_RELEASING, ///< client releasing stream before publish it (for output)
STATE_FCPUBLISH, ///< client FCPublishing stream (for output)
STATE_CONNECTING, ///< client connected to server successfully
STATE_READY, ///< client has sent all needed commands and waits for server reply
STATE_PLAYING, ///< client has started receiving multimedia data from server
STATE_PUBLISHING, ///< client has started sending multimedia data to server (for output)
STATE_STOPPED, ///< the broadcast has been stopped
} ClientState;
typedef struct TrackedMethod {
char *name;
int id;
} TrackedMethod;
/** protocol handler context */
typedef struct RTMPContext {
const AVClass *class;
@ -86,7 +88,6 @@ typedef struct RTMPContext {
uint8_t flv_header[11]; ///< partial incoming flv packet header
int flv_header_bytes; ///< number of initialized bytes in flv_header
int nb_invokes; ///< keeps track of invoke messages
int create_stream_invoke; ///< invoke id for the create stream command
char* tcurl; ///< url of the target stream
char* flashver; ///< version of the flash plugin
char* swfurl; ///< url of the swf player
@ -96,6 +97,9 @@ typedef struct RTMPContext {
int client_buffer_time; ///< client buffer time in ms
int flush_interval; ///< number of packets flushed in the same request (RTMPT only)
int encrypted; ///< use an encrypted connection (RTMPE only)
TrackedMethod *tracked_methods; ///< tracked methods buffer
int nb_tracked_methods; ///< number of tracked methods
int tracked_methods_size; ///< size of the tracked methods buffer
} RTMPContext;
#define PLAYER_KEY_OPEN_PART_LEN 30 ///< length of partial key used for first client digest signing
@ -121,6 +125,72 @@ static const uint8_t rtmp_server_key[] = {
0xE6, 0x36, 0xCF, 0xEB, 0x31, 0xAE
};
static int add_tracked_method(RTMPContext *rt, const char *name, int id)
{
void *ptr;
if (rt->nb_tracked_methods + 1 > rt->tracked_methods_size) {
rt->tracked_methods_size = (rt->nb_tracked_methods + 1) * 2;
ptr = av_realloc(rt->tracked_methods,
rt->tracked_methods_size * sizeof(*rt->tracked_methods));
if (!ptr)
return AVERROR(ENOMEM);
rt->tracked_methods = ptr;
}
rt->tracked_methods[rt->nb_tracked_methods].name = av_strdup(name);
if (!rt->tracked_methods[rt->nb_tracked_methods].name)
return AVERROR(ENOMEM);
rt->tracked_methods[rt->nb_tracked_methods].id = id;
rt->nb_tracked_methods++;
return 0;
}
static void del_tracked_method(RTMPContext *rt, int index)
{
memmove(&rt->tracked_methods[index], &rt->tracked_methods[index + 1],
sizeof(*rt->tracked_methods) * (rt->nb_tracked_methods - index - 1));
rt->nb_tracked_methods--;
}
static void free_tracked_methods(RTMPContext *rt)
{
int i;
for (i = 0; i < rt->nb_tracked_methods; i ++)
av_free(rt->tracked_methods[i].name);
av_free(rt->tracked_methods);
}
static int rtmp_send_packet(RTMPContext *rt, RTMPPacket *pkt, int track)
{
int ret;
if (pkt->type == RTMP_PT_INVOKE && track) {
GetByteContext gbc;
char name[128];
double pkt_id;
int len;
bytestream2_init(&gbc, pkt->data, pkt->data_size);
if ((ret = ff_amf_read_string(&gbc, name, sizeof(name), &len)) < 0)
goto fail;
if ((ret = ff_amf_read_number(&gbc, &pkt_id)) < 0)
goto fail;
if ((ret = add_tracked_method(rt, name, pkt_id)) < 0)
goto fail;
}
ret = ff_rtmp_packet_write(rt->stream, pkt, rt->chunk_size,
rt->prev_pkt[1]);
fail:
ff_rtmp_packet_destroy(pkt);
return ret;
}
static int rtmp_write_amf_data(URLContext *s, char *param, uint8_t **p)
{
char *field, *value;
@ -269,11 +339,7 @@ static int gen_connect(URLContext *s, RTMPContext *rt)
pkt.data_size = p - pkt.data;
ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->chunk_size,
rt->prev_pkt[1]);
ff_rtmp_packet_destroy(&pkt);
return ret;
return rtmp_send_packet(rt, &pkt, 1);
}
/**
@ -297,11 +363,7 @@ static int gen_release_stream(URLContext *s, RTMPContext *rt)
ff_amf_write_null(&p);
ff_amf_write_string(&p, rt->playpath);
ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->chunk_size,
rt->prev_pkt[1]);
ff_rtmp_packet_destroy(&pkt);
return ret;
return rtmp_send_packet(rt, &pkt, 0);
}
/**
@ -325,11 +387,7 @@ static int gen_fcpublish_stream(URLContext *s, RTMPContext *rt)
ff_amf_write_null(&p);
ff_amf_write_string(&p, rt->playpath);
ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->chunk_size,
rt->prev_pkt[1]);
ff_rtmp_packet_destroy(&pkt);
return ret;
return rtmp_send_packet(rt, &pkt, 0);
}
/**
@ -353,11 +411,7 @@ static int gen_fcunpublish_stream(URLContext *s, RTMPContext *rt)
ff_amf_write_null(&p);
ff_amf_write_string(&p, rt->playpath);
ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->chunk_size,
rt->prev_pkt[1]);
ff_rtmp_packet_destroy(&pkt);
return ret;
return rtmp_send_packet(rt, &pkt, 0);
}
/**
@ -380,13 +434,8 @@ static int gen_create_stream(URLContext *s, RTMPContext *rt)
ff_amf_write_string(&p, "createStream");
ff_amf_write_number(&p, ++rt->nb_invokes);
ff_amf_write_null(&p);
rt->create_stream_invoke = rt->nb_invokes;
ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->chunk_size,
rt->prev_pkt[1]);
ff_rtmp_packet_destroy(&pkt);
return ret;
return rtmp_send_packet(rt, &pkt, 1);
}
@ -412,11 +461,7 @@ static int gen_delete_stream(URLContext *s, RTMPContext *rt)
ff_amf_write_null(&p);
ff_amf_write_number(&p, rt->main_channel_id);
ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->chunk_size,
rt->prev_pkt[1]);
ff_rtmp_packet_destroy(&pkt);
return ret;
return rtmp_send_packet(rt, &pkt, 0);
}
/**
@ -437,11 +482,7 @@ static int gen_buffer_time(URLContext *s, RTMPContext *rt)
bytestream_put_be32(&p, rt->main_channel_id);
bytestream_put_be32(&p, rt->client_buffer_time);
ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->chunk_size,
rt->prev_pkt[1]);
ff_rtmp_packet_destroy(&pkt);
return ret;
return rtmp_send_packet(rt, &pkt, 0);
}
/**
@ -469,11 +510,7 @@ static int gen_play(URLContext *s, RTMPContext *rt)
ff_amf_write_string(&p, rt->playpath);
ff_amf_write_number(&p, rt->live);
ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->chunk_size,
rt->prev_pkt[1]);
ff_rtmp_packet_destroy(&pkt);
return ret;
return rtmp_send_packet(rt, &pkt, 1);
}
/**
@ -500,11 +537,7 @@ static int gen_publish(URLContext *s, RTMPContext *rt)
ff_amf_write_string(&p, rt->playpath);
ff_amf_write_string(&p, "live");
ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->chunk_size,
rt->prev_pkt[1]);
ff_rtmp_packet_destroy(&pkt);
return ret;
return rtmp_send_packet(rt, &pkt, 1);
}
/**
@ -529,11 +562,8 @@ static int gen_pong(URLContext *s, RTMPContext *rt, RTMPPacket *ppkt)
p = pkt.data;
bytestream_put_be16(&p, 7);
bytestream_put_be32(&p, AV_RB32(ppkt->data+2));
ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->chunk_size,
rt->prev_pkt[1]);
ff_rtmp_packet_destroy(&pkt);
return ret;
return rtmp_send_packet(rt, &pkt, 0);
}
/**
@ -551,11 +581,8 @@ static int gen_server_bw(URLContext *s, RTMPContext *rt)
p = pkt.data;
bytestream_put_be32(&p, rt->server_bw);
ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->chunk_size,
rt->prev_pkt[1]);
ff_rtmp_packet_destroy(&pkt);
return ret;
return rtmp_send_packet(rt, &pkt, 0);
}
/**
@ -576,11 +603,7 @@ static int gen_check_bw(URLContext *s, RTMPContext *rt)
ff_amf_write_number(&p, RTMP_NOTIFICATION);
ff_amf_write_null(&p);
ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->chunk_size,
rt->prev_pkt[1]);
ff_rtmp_packet_destroy(&pkt);
return ret;
return rtmp_send_packet(rt, &pkt, 0);
}
/**
@ -598,11 +621,8 @@ static int gen_bytes_read(URLContext *s, RTMPContext *rt, uint32_t ts)
p = pkt.data;
bytestream_put_be32(&p, rt->bytes_read);
ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->chunk_size,
rt->prev_pkt[1]);
ff_rtmp_packet_destroy(&pkt);
return ret;
return rtmp_send_packet(rt, &pkt, 0);
}
static int gen_fcsubscribe_stream(URLContext *s, RTMPContext *rt,
@ -622,11 +642,7 @@ static int gen_fcsubscribe_stream(URLContext *s, RTMPContext *rt,
ff_amf_write_null(&p);
ff_amf_write_string(&p, subscribe);
ret = ff_rtmp_packet_write(rt->stream, &pkt, rt->chunk_size,
rt->prev_pkt[1]);
ff_rtmp_packet_destroy(&pkt);
return ret;
return rtmp_send_packet(rt, &pkt, 1);
}
int ff_rtmp_calc_digest(const uint8_t *src, int len, int gap,
@ -1010,7 +1026,8 @@ static int handle_invoke(URLContext *s, RTMPPacket *pkt)
RTMPContext *rt = s->priv_data;
int i, t;
const uint8_t *data_end = pkt->data + pkt->data_size;
int ret;
char *tracked_method = NULL;
int ret = 0;
//TODO: check for the messages sent for wrong state?
if (!memcmp(pkt->data, "\002\000\006_error", 9)) {
@ -1021,68 +1038,72 @@ static int handle_invoke(URLContext *s, RTMPPacket *pkt)
av_log(s, AV_LOG_ERROR, "Server error: %s\n",tmpstr);
return -1;
} else if (!memcmp(pkt->data, "\002\000\007_result", 10)) {
switch (rt->state) {
case STATE_HANDSHAKED:
if (!rt->is_input) {
if ((ret = gen_release_stream(s, rt)) < 0)
return ret;
if ((ret = gen_fcpublish_stream(s, rt)) < 0)
return ret;
rt->state = STATE_RELEASING;
} else {
if ((ret = gen_server_bw(s, rt)) < 0)
return ret;
rt->state = STATE_CONNECTING;
}
if ((ret = gen_create_stream(s, rt)) < 0)
return ret;
GetByteContext gbc;
double pkt_id;
if (rt->is_input) {
/* Send the FCSubscribe command when the name of live
* stream is defined by the user or if it's a live stream. */
if (rt->subscribe) {
if ((ret = gen_fcsubscribe_stream(s, rt,
rt->subscribe)) < 0)
return ret;
} else if (rt->live == -1) {
if ((ret = gen_fcsubscribe_stream(s, rt,
rt->playpath)) < 0)
return ret;
}
bytestream2_init(&gbc, pkt->data + 10, pkt->data_size);
if ((ret = ff_amf_read_number(&gbc, &pkt_id)) < 0)
return ret;
for (i = 0; i < rt->nb_tracked_methods; i++) {
if (rt->tracked_methods[i].id != pkt_id)
continue;
tracked_method = rt->tracked_methods[i].name;
del_tracked_method(rt, i);
break;
}
if (!tracked_method) {
/* Ignore this reply when the current method is not tracked. */
return 0;
}
if (!memcmp(tracked_method, "connect", 7)) {
if (!rt->is_input) {
if ((ret = gen_release_stream(s, rt)) < 0)
goto invoke_fail;
if ((ret = gen_fcpublish_stream(s, rt)) < 0)
goto invoke_fail;
} else {
if ((ret = gen_server_bw(s, rt)) < 0)
goto invoke_fail;
}
if ((ret = gen_create_stream(s, rt)) < 0)
goto invoke_fail;
if (rt->is_input) {
/* Send the FCSubscribe command when the name of live
* stream is defined by the user or if it's a live stream. */
if (rt->subscribe) {
if ((ret = gen_fcsubscribe_stream(s, rt,
rt->subscribe)) < 0)
goto invoke_fail;
} else if (rt->live == -1) {
if ((ret = gen_fcsubscribe_stream(s, rt,
rt->playpath)) < 0)
goto invoke_fail;
}
break;
case STATE_FCPUBLISH:
rt->state = STATE_CONNECTING;
break;
case STATE_RELEASING:
rt->state = STATE_FCPUBLISH;
/* hack for Wowza Media Server, it does not send result for
* releaseStream and FCPublish calls */
if (!pkt->data[10]) {
int pkt_id = av_int2double(AV_RB64(pkt->data + 11));
if (pkt_id == rt->create_stream_invoke)
rt->state = STATE_CONNECTING;
}
if (rt->state != STATE_CONNECTING)
break;
case STATE_CONNECTING:
//extract a number from the result
if (pkt->data[10] || pkt->data[19] != 5 || pkt->data[20]) {
av_log(s, AV_LOG_WARNING, "Unexpected reply on connect()\n");
} else {
rt->main_channel_id = av_int2double(AV_RB64(pkt->data + 21));
}
if (rt->is_input) {
if ((ret = gen_play(s, rt)) < 0)
return ret;
if ((ret = gen_buffer_time(s, rt)) < 0)
return ret;
} else {
if ((ret = gen_publish(s, rt)) < 0)
return ret;
}
rt->state = STATE_READY;
break;
}
} else if (!memcmp(tracked_method, "createStream", 12)) {
//extract a number from the result
if (pkt->data[10] || pkt->data[19] != 5 || pkt->data[20]) {
av_log(s, AV_LOG_WARNING, "Unexpected reply on connect()\n");
} else {
rt->main_channel_id = av_int2double(AV_RB64(pkt->data + 21));
}
if (!rt->is_input) {
if ((ret = gen_publish(s, rt)) < 0)
goto invoke_fail;
} else {
if ((ret = gen_play(s, rt)) < 0)
goto invoke_fail;
if ((ret = gen_buffer_time(s, rt)) < 0)
goto invoke_fail;
}
}
} else if (!memcmp(pkt->data, "\002\000\010onStatus", 11)) {
const uint8_t* ptr = pkt->data + 11;
@ -1113,7 +1134,9 @@ static int handle_invoke(URLContext *s, RTMPPacket *pkt)
return ret;
}
return 0;
invoke_fail:
av_free(tracked_method);
return ret;
}
/**
@ -1283,6 +1306,7 @@ static int rtmp_close(URLContext *h)
if (rt->state > STATE_HANDSHAKED)
ret = gen_delete_stream(h, rt);
free_tracked_methods(rt);
av_freep(&rt->flv_data);
ffurl_close(rt->stream);
return ret;
@ -1570,10 +1594,8 @@ static int rtmp_write(URLContext *s, const uint8_t *buf, int size)
if (rt->flv_off == rt->flv_size) {
rt->skip_bytes = 4;
if ((ret = ff_rtmp_packet_write(rt->stream, &rt->out_pkt,
rt->chunk_size, rt->prev_pkt[1])) < 0)
if ((ret = rtmp_send_packet(rt, &rt->out_pkt, 0)) < 0)
return ret;
ff_rtmp_packet_destroy(&rt->out_pkt);
rt->flv_size = 0;
rt->flv_off = 0;
rt->flv_header_bytes = 0;


@ -37,7 +37,6 @@
#include "libavutil/avstring.h"
#include "libavcodec/get_bits.h"
#include "avformat.h"
#include "mpegts.h"
#include "network.h"
#include <assert.h>


@ -281,8 +281,8 @@ void ff_rtp_send_data(AVFormatContext *s1, const uint8_t *buf1, int len, int m)
/* send an integer number of samples and compute time stamp and fill
the rtp send buffer before sending. */
static void rtp_send_samples(AVFormatContext *s1,
const uint8_t *buf1, int size, int sample_size_bits)
static int rtp_send_samples(AVFormatContext *s1,
const uint8_t *buf1, int size, int sample_size_bits)
{
RTPMuxContext *s = s1->priv_data;
int len, max_packet_size, n;
@ -292,7 +292,7 @@ static void rtp_send_samples(AVFormatContext *s1,
max_packet_size = (s->max_payload_size / aligned_samples_size) * aligned_samples_size;
/* Not needed, but who knows. Don't check if samples aren't an even number of bytes. */
if ((sample_size_bits % 8) == 0 && ((8 * size) % sample_size_bits) != 0)
av_abort();
return AVERROR(EINVAL);
n = 0;
while (size > 0) {
s->buf_ptr = s->buf;
@ -307,6 +307,7 @@ static void rtp_send_samples(AVFormatContext *s1,
ff_rtp_send_data(s1, s->buf, s->buf_ptr - s->buf, 0);
n += (s->buf_ptr - s->buf);
}
return 0;
}
static void rtp_send_mpegaudio(AVFormatContext *s1,
@ -461,25 +462,21 @@ static int rtp_write_packet(AVFormatContext *s1, AVPacket *pkt)
case AV_CODEC_ID_PCM_ALAW:
case AV_CODEC_ID_PCM_U8:
case AV_CODEC_ID_PCM_S8:
rtp_send_samples(s1, pkt->data, size, 8 * st->codec->channels);
break;
return rtp_send_samples(s1, pkt->data, size, 8 * st->codec->channels);
case AV_CODEC_ID_PCM_U16BE:
case AV_CODEC_ID_PCM_U16LE:
case AV_CODEC_ID_PCM_S16BE:
case AV_CODEC_ID_PCM_S16LE:
rtp_send_samples(s1, pkt->data, size, 16 * st->codec->channels);
break;
return rtp_send_samples(s1, pkt->data, size, 16 * st->codec->channels);
case AV_CODEC_ID_ADPCM_G722:
/* The actual sample size is half a byte per sample, but since the
* stream clock rate is 8000 Hz while the sample rate is 16000 Hz,
* the correct parameter for send_samples_bits is 8 bits per stream
* clock. */
rtp_send_samples(s1, pkt->data, size, 8 * st->codec->channels);
break;
return rtp_send_samples(s1, pkt->data, size, 8 * st->codec->channels);
case AV_CODEC_ID_ADPCM_G726:
rtp_send_samples(s1, pkt->data, size,
st->codec->bits_per_coded_sample * st->codec->channels);
break;
return rtp_send_samples(s1, pkt->data, size,
st->codec->bits_per_coded_sample * st->codec->channels);
case AV_CODEC_ID_MP2:
case AV_CODEC_ID_MP3:
rtp_send_mpegaudio(s1, pkt->data, size);


@ -46,6 +46,7 @@
#include "rtpenc_chain.h"
#include "url.h"
#include "rtpenc.h"
#include "mpegts.h"
//#define DEBUG
@ -370,7 +371,9 @@ static void sdp_parse_line(AVFormatContext *s, SDPParseState *s1,
get_word(buf1, sizeof(buf1), &p); /* port */
rtsp_st->sdp_port = atoi(buf1);
get_word(buf1, sizeof(buf1), &p); /* protocol (ignored) */
get_word(buf1, sizeof(buf1), &p); /* protocol */
if (!strcmp(buf1, "udp"))
rt->transport = RTSP_TRANSPORT_RAW;
/* XXX: handle list of formats */
get_word(buf1, sizeof(buf1), &p); /* format list */
@ -378,6 +381,8 @@ static void sdp_parse_line(AVFormatContext *s, SDPParseState *s1,
if (!strcmp(ff_rtp_enc_name(rtsp_st->sdp_payload_type), "MP2T")) {
/* no corresponding stream */
if (rt->transport == RTSP_TRANSPORT_RAW && !rt->ts && CONFIG_RTPDEC)
rt->ts = ff_mpegts_parse_open(s);
} else if (rt->server_type == RTSP_SERVER_WMS &&
codec_type == AVMEDIA_TYPE_DATA) {
/* RTX stream, a stream that carries all the other actual
@ -563,7 +568,7 @@ void ff_rtsp_undo_setup(AVFormatContext *s)
avformat_free_context(rtpctx);
} else if (rt->transport == RTSP_TRANSPORT_RDT && CONFIG_RTPDEC)
ff_rdt_parse_close(rtsp_st->transport_priv);
else if (CONFIG_RTPDEC)
else if (rt->transport == RTSP_TRANSPORT_RAW && CONFIG_RTPDEC)
ff_rtp_parse_close(rtsp_st->transport_priv);
}
rtsp_st->transport_priv = NULL;
@ -594,6 +599,8 @@ void ff_rtsp_close_streams(AVFormatContext *s)
if (rt->asf_ctx) {
avformat_close_input(&rt->asf_ctx);
}
if (rt->ts && CONFIG_RTPDEC)
ff_mpegts_parse_close(rt->ts);
av_free(rt->p);
av_free(rt->recvbuf);
}
@ -617,6 +624,8 @@ int ff_rtsp_open_transport_ctx(AVFormatContext *s, RTSPStream *rtsp_st)
rtsp_st->rtp_handle = NULL;
if (ret < 0)
return ret;
} else if (rt->transport == RTSP_TRANSPORT_RAW) {
return 0; // Don't need to open any parser here
} else if (rt->transport == RTSP_TRANSPORT_RDT && CONFIG_RTPDEC)
rtsp_st->transport_priv = ff_rdt_parse_open(s, st->index,
rtsp_st->dynamic_protocol_context,
@ -629,7 +638,7 @@ int ff_rtsp_open_transport_ctx(AVFormatContext *s, RTSPStream *rtsp_st)
if (!rtsp_st->transport_priv) {
return AVERROR(ENOMEM);
} else if (rt->transport != RTSP_TRANSPORT_RDT && CONFIG_RTPDEC) {
} else if (rt->transport == RTSP_TRANSPORT_RTP && CONFIG_RTPDEC) {
if (rtsp_st->dynamic_handler) {
ff_rtp_parse_set_dynamic_protocol(rtsp_st->transport_priv,
rtsp_st->dynamic_protocol_context,
@ -698,6 +707,15 @@ static void rtsp_parse_transport(RTSPMessageHeader *reply, const char *p)
get_word_sep(lower_transport, sizeof(lower_transport), "/;,", &p);
profile[0] = '\0';
th->transport = RTSP_TRANSPORT_RDT;
} else if (!av_strcasecmp(transport_protocol, "raw")) {
get_word_sep(profile, sizeof(profile), "/;,", &p);
lower_transport[0] = '\0';
/* raw/raw/<protocol> */
if (*p == '/') {
get_word_sep(lower_transport, sizeof(lower_transport),
";,", &p);
}
th->transport = RTSP_TRANSPORT_RAW;
}
if (!av_strcasecmp(lower_transport, "TCP"))
th->lower_transport = RTSP_LOWER_TRANSPORT_TCP;
@ -1187,6 +1205,8 @@ int ff_rtsp_make_setup_request(AVFormatContext *s, const char *host, int port,
if (rt->transport == RTSP_TRANSPORT_RDT)
trans_pref = "x-pn-tng";
else if (rt->transport == RTSP_TRANSPORT_RAW)
trans_pref = "RAW/RAW";
else
trans_pref = "RTP/AVP";
@ -1753,8 +1773,15 @@ int ff_rtsp_fetch_packet(AVFormatContext *s, AVPacket *pkt)
if (rt->cur_transport_priv) {
if (rt->transport == RTSP_TRANSPORT_RDT) {
ret = ff_rdt_parse_packet(rt->cur_transport_priv, pkt, NULL, 0);
} else
} else if (rt->transport == RTSP_TRANSPORT_RTP) {
ret = ff_rtp_parse_packet(rt->cur_transport_priv, pkt, NULL, 0);
} else if (rt->ts && CONFIG_RTPDEC) {
ret = ff_mpegts_parse_packet(rt->ts, pkt, rt->recvbuf + rt->recvbuf_pos, rt->recvbuf_len - rt->recvbuf_pos);
if (ret >= 0) {
rt->recvbuf_pos += ret;
ret = rt->recvbuf_pos < rt->recvbuf_len;
}
}
if (ret == 0) {
rt->cur_transport_priv = NULL;
return 0;
@ -1817,7 +1844,7 @@ int ff_rtsp_fetch_packet(AVFormatContext *s, AVPacket *pkt)
return AVERROR_EOF;
if (rt->transport == RTSP_TRANSPORT_RDT) {
ret = ff_rdt_parse_packet(rtsp_st->transport_priv, pkt, &rt->recvbuf, len);
} else {
} else if (rt->transport == RTSP_TRANSPORT_RTP) {
ret = ff_rtp_parse_packet(rtsp_st->transport_priv, pkt, &rt->recvbuf, len);
if (ret < 0) {
/* Either bad packet, or a RTCP packet. Check if the
@ -1856,6 +1883,20 @@ int ff_rtsp_fetch_packet(AVFormatContext *s, AVPacket *pkt)
return AVERROR_EOF;
}
}
} else if (rt->ts && CONFIG_RTPDEC) {
ret = ff_mpegts_parse_packet(rt->ts, pkt, rt->recvbuf, len);
if (ret >= 0) {
if (ret < len) {
rt->recvbuf_len = len;
rt->recvbuf_pos = ret;
rt->cur_transport_priv = rt->ts;
return 1;
} else {
ret = 0;
}
}
} else {
return AVERROR_INVALIDDATA;
}
end:
if (ret < 0)


@ -52,6 +52,7 @@ enum RTSPLowerTransport {
enum RTSPTransport {
RTSP_TRANSPORT_RTP, /**< Standards-compliant RTP */
RTSP_TRANSPORT_RDT, /**< Realmedia Data Transport */
RTSP_TRANSPORT_RAW, /**< Raw data (over UDP) */
RTSP_TRANSPORT_NB
};
@ -310,6 +311,13 @@ typedef struct RTSPState {
* other cases, this is a copy of AVFormatContext->filename. */
char control_uri[1024];
/** The following are used for parsing raw mpegts in udp */
//@{
struct MpegTSContext *ts;
int recvbuf_pos;
int recvbuf_len;
//@}
/** Additional output handle, used when input and output are done
* separately, eg for HTTP tunneling. */
URLContext *rtsp_hd_out;


@ -31,7 +31,7 @@
#define LIBAVFORMAT_VERSION_MAJOR 54
#define LIBAVFORMAT_VERSION_MINOR 22
#define LIBAVFORMAT_VERSION_MICRO 103
#define LIBAVFORMAT_VERSION_MICRO 104
#define LIBAVFORMAT_VERSION_INT AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, \
LIBAVFORMAT_VERSION_MINOR, \


@ -93,38 +93,10 @@ struct AVDictionary {
# define offsetof(T, F) ((unsigned int)((char *)&((T *)0)->F))
#endif
/* Use to export labels from asm. */
#define LABEL_MANGLE(a) EXTERN_PREFIX #a
// Use rip-relative addressing if compiling PIC code on x86-64.
#if ARCH_X86_64 && defined(PIC)
# define LOCAL_MANGLE(a) #a "(%%rip)"
#else
# define LOCAL_MANGLE(a) #a
#endif
#define MANGLE(a) EXTERN_PREFIX LOCAL_MANGLE(a)
/* debug stuff */
#define av_abort() do { av_log(NULL, AV_LOG_ERROR, "Abort at %s:%d\n", __FILE__, __LINE__); abort(); } while (0)
/* math */
#if ARCH_X86 && HAVE_INLINE_ASM
#define MASK_ABS(mask, level)\
__asm__ volatile(\
"cltd \n\t"\
"xorl %1, %0 \n\t"\
"subl %1, %0 \n\t"\
: "+a" (level), "=&d" (mask)\
);
#else
#define MASK_ABS(mask, level)\
mask = level >> 31;\
level = (level ^ mask) - mask;
#endif
/* avoid usage of dangerous/inappropriate system functions */
#undef malloc
#define malloc please_use_av_malloc

libavutil/x86/asm.h (new file)

@ -0,0 +1,110 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_X86_ASM_H
#define AVUTIL_X86_ASM_H
#include <stdint.h>
#include "config.h"
#if ARCH_X86_64
# define OPSIZE "q"
# define REG_a "rax"
# define REG_b "rbx"
# define REG_c "rcx"
# define REG_d "rdx"
# define REG_D "rdi"
# define REG_S "rsi"
# define PTR_SIZE "8"
typedef int64_t x86_reg;
# define REG_SP "rsp"
# define REG_BP "rbp"
# define REGBP rbp
# define REGa rax
# define REGb rbx
# define REGc rcx
# define REGd rdx
# define REGSP rsp
#elif ARCH_X86_32
# define OPSIZE "l"
# define REG_a "eax"
# define REG_b "ebx"
# define REG_c "ecx"
# define REG_d "edx"
# define REG_D "edi"
# define REG_S "esi"
# define PTR_SIZE "4"
typedef int32_t x86_reg;
# define REG_SP "esp"
# define REG_BP "ebp"
# define REGBP ebp
# define REGa eax
# define REGb ebx
# define REGc ecx
# define REGd edx
# define REGSP esp
#else
typedef int x86_reg;
#endif
#define HAVE_7REGS (ARCH_X86_64 || (HAVE_EBX_AVAILABLE && HAVE_EBP_AVAILABLE))
#define HAVE_6REGS (ARCH_X86_64 || (HAVE_EBX_AVAILABLE || HAVE_EBP_AVAILABLE))
#if ARCH_X86_64 && defined(PIC)
# define BROKEN_RELOCATIONS 1
#endif
/*
* If gcc is not set to support sse (-msse) it will not accept xmm registers
* in the clobber list for inline asm. XMM_CLOBBERS takes a list of xmm
* registers to be marked as clobbered and evaluates to nothing if they are
* not supported, or to the list itself if they are supported. Since a clobber
* list may not be empty, XMM_CLOBBERS_ONLY should be used if the xmm
* registers are the only in the clobber list.
* For example a list with "eax" and "xmm0" as clobbers should become:
* : XMM_CLOBBERS("xmm0",) "eax"
* and a list with only "xmm0" should become:
* XMM_CLOBBERS_ONLY("xmm0")
*/
#if HAVE_XMM_CLOBBERS
# define XMM_CLOBBERS(...) __VA_ARGS__
# define XMM_CLOBBERS_ONLY(...) : __VA_ARGS__
#else
# define XMM_CLOBBERS(...)
# define XMM_CLOBBERS_ONLY(...)
#endif
/* Use to export labels from asm. */
#define LABEL_MANGLE(a) EXTERN_PREFIX #a
// Use rip-relative addressing if compiling PIC code on x86-64.
#if ARCH_X86_64 && defined(PIC)
# define LOCAL_MANGLE(a) #a "(%%rip)"
#else
# define LOCAL_MANGLE(a) #a
#endif
#define MANGLE(a) EXTERN_PREFIX LOCAL_MANGLE(a)
#endif /* AVUTIL_X86_ASM_H */
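For reference, MANGLE() exists so inline asm can name a C-level symbol with the
correct assembler spelling, switching to rip-relative addressing for PIC code on
x86-64. A minimal sketch of the intended use (the global and function names are
hypothetical; an x86-64 target and an empty EXTERN_PREFIX are assumed):

#include <stdint.h>

#define EXTERN_PREFIX ""                 /* assumed ELF-style symbol naming */
#if defined(__PIC__)
#   define LOCAL_MANGLE(a) #a "(%%rip)"
#else
#   define LOCAL_MANGLE(a) #a
#endif
#define MANGLE(a) EXTERN_PREFIX LOCAL_MANGLE(a)

uint64_t example_constant = 42;          /* hypothetical global referenced from asm */

uint64_t load_example(void)
{
    uint64_t v;
    __asm__ ("movq " MANGLE(example_constant) ", %0" : "=r"(v));
    return v;
}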


@ -22,7 +22,7 @@
#include <stdlib.h>
#include <string.h>
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavutil/cpu.h"
#if HAVE_INLINE_ASM


@ -1,98 +1 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_X86_CPU_H
#define AVUTIL_X86_CPU_H
#include <stdint.h>
#include "config.h"
#if ARCH_X86_64
# define OPSIZE "q"
# define REG_a "rax"
# define REG_b "rbx"
# define REG_c "rcx"
# define REG_d "rdx"
# define REG_D "rdi"
# define REG_S "rsi"
# define PTR_SIZE "8"
typedef int64_t x86_reg;
# define REG_SP "rsp"
# define REG_BP "rbp"
# define REGBP rbp
# define REGa rax
# define REGb rbx
# define REGc rcx
# define REGd rdx
# define REGSP rsp
#elif ARCH_X86_32
# define OPSIZE "l"
# define REG_a "eax"
# define REG_b "ebx"
# define REG_c "ecx"
# define REG_d "edx"
# define REG_D "edi"
# define REG_S "esi"
# define PTR_SIZE "4"
typedef int32_t x86_reg;
# define REG_SP "esp"
# define REG_BP "ebp"
# define REGBP ebp
# define REGa eax
# define REGb ebx
# define REGc ecx
# define REGd edx
# define REGSP esp
#else
typedef int x86_reg;
#endif
#define HAVE_7REGS (ARCH_X86_64 || (HAVE_EBX_AVAILABLE && HAVE_EBP_AVAILABLE))
#define HAVE_6REGS (ARCH_X86_64 || (HAVE_EBX_AVAILABLE || HAVE_EBP_AVAILABLE))
#if ARCH_X86_64 && defined(PIC)
# define BROKEN_RELOCATIONS 1
#endif
/*
* If gcc is not set to support sse (-msse) it will not accept xmm registers
* in the clobber list for inline asm. XMM_CLOBBERS takes a list of xmm
* registers to be marked as clobbered and evaluates to nothing if they are
* not supported, or to the list itself if they are supported. Since a clobber
* list may not be empty, XMM_CLOBBERS_ONLY should be used if the xmm
* registers are the only in the clobber list.
* For example a list with "eax" and "xmm0" as clobbers should become:
* : XMM_CLOBBERS("xmm0",) "eax"
* and a list with only "xmm0" should become:
* XMM_CLOBBERS_ONLY("xmm0")
*/
#if HAVE_XMM_CLOBBERS
# define XMM_CLOBBERS(...) __VA_ARGS__
# define XMM_CLOBBERS_ONLY(...) : __VA_ARGS__
#else
# define XMM_CLOBBERS(...)
# define XMM_CLOBBERS_ONLY(...)
#endif
#endif /* AVUTIL_X86_CPU_H */
#include "libavutil/x86/asm.h"


@ -23,7 +23,7 @@
* mmx/mmx2/3dnow postprocess code.
*/
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#undef REAL_PAVGB
#undef PAVGB


@ -18,7 +18,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavutil/cpu.h"
#include "libswresample/swresample_internal.h"


@ -46,7 +46,7 @@
#include "libavutil/mathematics.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "rgb2rgb.h"
#include "swscale.h"
#include "swscale_internal.h"


@ -27,7 +27,7 @@
#include "config.h"
#include "libavutil/attributes.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavutil/cpu.h"
#include "libavutil/bswap.h"
#include "libswscale/rgb2rgb.h"


@ -25,7 +25,7 @@
#include "libavutil/attributes.h"
#include "libavutil/avassert.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavutil/cpu.h"
#include "libavutil/pixdesc.h"


@ -34,7 +34,7 @@
#include "libswscale/swscale.h"
#include "libswscale/swscale_internal.h"
#include "libavutil/attributes.h"
#include "libavutil/x86_cpu.h"
#include "libavutil/x86/asm.h"
#include "libavutil/cpu.h"
#if HAVE_INLINE_ASM