lavu: Drop the {minus,plus}1 suffix from AVComponentDescriptor fields

The new fields can be accessed directly and are more intelligible.

Signed-off-by: Vittorio Giovara <vittorio.giovara@gmail.com>
Author: Vittorio Giovara
Date:   2015-09-03 13:44:14 +02:00
Parent: 6b3ef7f080
Commit: 2268db2cd0
19 changed files with 423 additions and 402 deletions
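
For code built on libavutil's pixel format descriptors, the rename is a mechanical substitution. Below is a minimal sketch of the new accessor, assuming the post-rename pixdesc.h from this commit; bit_depth_of is a hypothetical helper, not a library API.

#include <libavutil/pixdesc.h>

/* Hypothetical helper: return the bit depth of a format's first component.
 * Before this commit the same value was desc->comp[0].depth_minus1 + 1;
 * likewise step replaces step_minus1 + 1 and offset replaces offset_plus1 - 1. */
static int bit_depth_of(enum AVPixelFormat fmt)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(fmt);
    if (!desc)
        return 0;
    return desc->comp[0].depth; /* e.g. 8 for AV_PIX_FMT_YUV420P */
}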


@@ -17,6 +17,8 @@ API changes, most recent first:
 xxxxxxx - Change type of AVPixFmtDescriptor.flags from uint8_t to uint64_t.
 xxxxxxx - Change type of AVComponentDescriptor fields from uint16_t to int
           and drop bit packing.
+xxxxxxx - Add step, offset, and depth to AVComponentDescriptor to replace
+          the deprecated step_minus1, offset_plus1, and depth_minus1.

 2015-xx-xx - lavu 54.17.0
 xxxxxxx - Add av_blowfish_alloc().


@@ -2620,7 +2620,7 @@ static int verify_md5(HEVCContext *s, AVFrame *frame)
     if (!desc)
         return AVERROR(EINVAL);

-    pixel_shift = desc->comp[0].depth_minus1 > 7;
+    pixel_shift = desc->comp[0].depth > 8;

     av_log(s->avctx, AV_LOG_DEBUG, "Verifying checksum for frame with POC %d: ",
            s->poc);


@@ -198,7 +198,7 @@ int ff_hevc_output_frame(HEVCContext *s, AVFrame *out, int flush)
             if (!desc)
                 return AVERROR_BUG;

-            pixel_shift = desc->comp[0].depth_minus1 > 7;
+            pixel_shift = desc->comp[0].depth > 8;

             ret = av_frame_ref(out, frame->frame);
             ff_hevc_unref_frame(s, frame, HEVC_FRAME_FLAG_OUTPUT);


@@ -67,7 +67,7 @@ int avcodec_get_pix_fmt_loss(enum AVPixelFormat dst_pix_fmt,
         return 0;

     for (i = 0; i < nb_components; i++)
-        if (src_desc->comp[i].depth_minus1 > dst_desc->comp[i].depth_minus1)
+        if (src_desc->comp[i].depth > dst_desc->comp[i].depth)
             loss |= FF_LOSS_DEPTH;

     if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w ||


@@ -94,22 +94,22 @@ static int libopenjpeg_matches_pix_fmt(const opj_image_t *img,
     switch (desc->nb_components) {
     case 4:
         match = match &&
-                desc->comp[3].depth_minus1 + 1 >= img->comps[3].prec &&
+                desc->comp[3].depth >= img->comps[3].prec &&
                 1 == img->comps[3].dx &&
                 1 == img->comps[3].dy;
     case 3:
         match = match &&
-                desc->comp[2].depth_minus1 + 1 >= img->comps[2].prec &&
+                desc->comp[2].depth >= img->comps[2].prec &&
                 1 << desc->log2_chroma_w == img->comps[2].dx &&
                 1 << desc->log2_chroma_h == img->comps[2].dy;
     case 2:
         match = match &&
-                desc->comp[1].depth_minus1 + 1 >= img->comps[1].prec &&
+                desc->comp[1].depth >= img->comps[1].prec &&
                 1 << desc->log2_chroma_w == img->comps[1].dx &&
                 1 << desc->log2_chroma_h == img->comps[1].dy;
     case 1:
         match = match &&
-                desc->comp[0].depth_minus1 + 1 >= img->comps[0].prec &&
+                desc->comp[0].depth >= img->comps[0].prec &&
                 1 == img->comps[0].dx &&
                 1 == img->comps[0].dy;
     default:

@@ -365,7 +365,7 @@ static int libopenjpeg_decode_frame(AVCodecContext *avctx,
     }

     desc = av_pix_fmt_desc_get(avctx->pix_fmt);
-    pixel_size = desc->comp[0].step_minus1 + 1;
+    pixel_size = desc->comp[0].step;
     ispacked = libopenjpeg_ispacked(avctx->pix_fmt);

     switch (pixel_size) {


@@ -131,8 +131,8 @@ static opj_image_t *libopenjpeg_create_image(AVCodecContext *avctx,
     }

     for (i = 0; i < numcomps; i++) {
-        cmptparm[i].prec = desc->comp[i].depth_minus1 + 1;
-        cmptparm[i].bpp = desc->comp[i].depth_minus1 + 1;
+        cmptparm[i].prec = desc->comp[i].depth;
+        cmptparm[i].bpp = desc->comp[i].depth;
         cmptparm[i].sgnd = 0;
         cmptparm[i].dx = sub_dx[i];
         cmptparm[i].dy = sub_dy[i];


@@ -78,7 +78,7 @@ static av_cold int libx265_encode_init(AVCodecContext *avctx)
 {
     libx265Context *ctx = avctx->priv_data;

-    ctx->api = x265_api_get(av_pix_fmt_desc_get(avctx->pix_fmt)->comp[0].depth_minus1 + 1);
+    ctx->api = x265_api_get(av_pix_fmt_desc_get(avctx->pix_fmt)->comp[0].depth);
     if (!ctx->api)
         ctx->api = x265_api_get(0);

@@ -261,7 +261,7 @@ static int libx265_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
         }

         x265pic.pts = pic->pts;
-        x265pic.bitDepth = av_pix_fmt_desc_get(avctx->pix_fmt)->comp[0].depth_minus1 + 1;
+        x265pic.bitDepth = av_pix_fmt_desc_get(avctx->pix_fmt)->comp[0].depth;

         x265pic.sliceType = pic->pict_type == AV_PICTURE_TYPE_I ? X265_TYPE_I :
                             pic->pict_type == AV_PICTURE_TYPE_P ? X265_TYPE_P :


@@ -83,7 +83,7 @@ static int pnm_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
              "P%c\n%d %d\n", c, avctx->width, h1);
     bytestream += strlen(bytestream);
     if (avctx->pix_fmt != AV_PIX_FMT_MONOWHITE) {
-        int maxdepth = (1 << (av_pix_fmt_desc_get(avctx->pix_fmt)->comp[0].depth_minus1 + 1)) - 1;
+        int maxdepth = (1 << av_pix_fmt_desc_get(avctx->pix_fmt)->comp[0].depth) - 1;
         snprintf(bytestream, bytestream_end - bytestream,
                  "%d\n", maxdepth);
         bytestream += strlen(bytestream);


@@ -269,7 +269,7 @@ static int config_input(AVFilterLink *inlink)
     s->hsub = desc->log2_chroma_w;
     s->vsub = desc->log2_chroma_h;
-    s->depth = desc->comp[0].depth_minus1+1;
+    s->depth = desc->comp[0].depth;

     s->line = av_malloc(inlink->w * sizeof(*s->line));
     if (!s->line)


@@ -186,7 +186,7 @@ static int filter_slice(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
     YADIFContext *s = ctx->priv;
     ThreadData *td = arg;
     int refs = s->cur->linesize[td->plane];
-    int df = (s->csp->comp[td->plane].depth_minus1 + 8) / 8;
+    int df = (s->csp->comp[td->plane].depth + 7) / 8;
     int pix_3 = 3 * df;
     int slice_h = td->h / nb_jobs;
     int slice_start = jobnr * slice_h;

@@ -463,7 +463,7 @@ static int config_props(AVFilterLink *link)
     link->h = link->src->inputs[0]->h;

     s->csp = av_pix_fmt_desc_get(link->format);
-    if (s->csp->comp[0].depth_minus1 / 8 == 1) {
+    if (s->csp->comp[0].depth > 8) {
         s->filter_line = filter_line_c_16bit;
         s->filter_edges = filter_edges_16bit;
     } else {


@@ -40,8 +40,8 @@ void av_image_fill_max_pixsteps(int max_pixsteps[4], int max_pixstep_comps[4],

     for (i = 0; i < 4; i++) {
         const AVComponentDescriptor *comp = &(pixdesc->comp[i]);
-        if ((comp->step_minus1+1) > max_pixsteps[comp->plane]) {
-            max_pixsteps[comp->plane] = comp->step_minus1+1;
+        if (comp->step > max_pixsteps[comp->plane]) {
+            max_pixsteps[comp->plane] = comp->step;
             if (max_pixstep_comps)
                 max_pixstep_comps[comp->plane] = i;
         }

@@ -59,7 +59,7 @@ int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane)
         return AVERROR(EINVAL);

     if (desc->flags & AV_PIX_FMT_FLAG_BITSTREAM)
-        return (width * (desc->comp[0].step_minus1+1) + 7) >> 3;
+        return (width * desc->comp[0].step + 7) >> 3;

     av_image_fill_max_pixsteps(max_step, max_step_comp, desc);
     s = (max_step_comp[plane] == 1 || max_step_comp[plane] == 2) ? desc->log2_chroma_w : 0;

@@ -79,9 +79,9 @@ int av_image_fill_linesizes(int linesizes[4], enum AVPixelFormat pix_fmt, int wi
         return AVERROR(EINVAL);

     if (desc->flags & AV_PIX_FMT_FLAG_BITSTREAM) {
-        if (width > (INT_MAX -7) / (desc->comp[0].step_minus1+1))
+        if (width > (INT_MAX - 7) / desc->comp[0].step)
             return AVERROR(EINVAL);
-        linesizes[0] = (width * (desc->comp[0].step_minus1+1) + 7) >> 3;
+        linesizes[0] = (width * desc->comp[0].step + 7) >> 3;
         return 0;
     }
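
As the av_image_get_linesize() hunk above shows, step counts bits for bitstream formats and bytes otherwise, so a row's byte width is (width * step + 7) >> 3 in the bitstream case. A short usage sketch of the public imgutils API; the numeric results assume the standard descriptors for these two formats:

#include <libavutil/imgutils.h>

static void linesize_examples(void)
{
    /* AV_PIX_FMT_MONOWHITE is a bitstream format: step = 1 bit,
     * so a 640-pixel row occupies (640 * 1 + 7) >> 3 = 80 bytes. */
    int mono = av_image_get_linesize(AV_PIX_FMT_MONOWHITE, 640, 0);

    /* AV_PIX_FMT_RGB24 is byte-oriented: step = 3 bytes, giving 640 * 3 = 1920 bytes. */
    int rgb = av_image_get_linesize(AV_PIX_FMT_RGB24, 640, 0);

    (void)mono;
    (void)rgb;
}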

(File diff suppressed because it is too large.)


@@ -34,16 +34,16 @@ typedef struct AVComponentDescriptor {
     int plane;

     /**
-     * Number of elements between 2 horizontally consecutive pixels minus 1.
+     * Number of elements between 2 horizontally consecutive pixels.
      * Elements are bits for bitstream formats, bytes otherwise.
      */
-    int step_minus1;
+    int step;

     /**
-     * Number of elements before the component of the first pixel plus 1.
+     * Number of elements before the component of the first pixel.
      * Elements are bits for bitstream formats, bytes otherwise.
      */
-    int offset_plus1;
+    int offset;

     /**
      * Number of least significant bits that must be shifted away

@@ -52,9 +52,20 @@ typedef struct AVComponentDescriptor {
     int shift;

     /**
-     * Number of bits in the component minus 1.
+     * Number of bits in the component.
      */
-    int depth_minus1;
+    int depth;
+
+#if FF_API_PLUS1_MINUS1
+    /** deprecated, use step instead */
+    attribute_deprecated int step_minus1;
+
+    /** deprecated, use depth instead */
+    attribute_deprecated int depth_minus1;
+
+    /** deprecated, use offset instead */
+    attribute_deprecated int offset_plus1;
+#endif
 } AVComponentDescriptor;

 /**
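
To make the new field semantics concrete, here is a minimal sketch (not from this commit) of locating component c of pixel (x, y) with the renamed fields. It assumes a byte-oriented (non-bitstream) format whose component fits in a single byte and ignores chroma subsampling; read_comp is a hypothetical helper.

#include <stdint.h>
#include <libavutil/pixdesc.h>

/* Hypothetical helper: read one component of pixel (x, y).
 * Only valid for non-bitstream formats whose component fits in one byte,
 * and without accounting for log2_chroma_w/h. */
static unsigned read_comp(const AVPixFmtDescriptor *desc, int c,
                          const uint8_t *data[4], const int linesize[4],
                          int x, int y)
{
    const AVComponentDescriptor *comp = &desc->comp[c];
    const uint8_t *p = data[comp->plane] + y * linesize[comp->plane]
                                         + x * comp->step + comp->offset;
    /* step and offset count bytes here; shift and depth select the bits. */
    return (*p >> comp->shift) & ((1 << comp->depth) - 1);
}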
/** /**


@@ -93,6 +93,9 @@
 #ifndef FF_API_CRYPTO_CONTEXT
 #define FF_API_CRYPTO_CONTEXT (LIBAVUTIL_VERSION_MAJOR < 56)
 #endif
+#ifndef FF_API_PLUS1_MINUS1
+#define FF_API_PLUS1_MINUS1 (LIBAVUTIL_VERSION_MAJOR < 56)
+#endif

 /**
/** /**


@@ -1279,7 +1279,7 @@ yuv2gbrp_full_X_c(SwsContext *c, const int16_t *lumFilter,
     int i;
     int hasAlpha = (desc->flags & AV_PIX_FMT_FLAG_ALPHA) && alpSrc;
     uint16_t **dest16 = (uint16_t**)dest;
-    int SH = 22 + 7 - desc->comp[0].depth_minus1;
+    int SH = 22 + 8 - desc->comp[0].depth;

     for (i = 0; i < dstW; i++) {
         int j;

@@ -1366,7 +1366,7 @@ av_cold void ff_sws_init_output_funcs(SwsContext *c,
         *yuv2planeX = isBE(dstFormat) ? yuv2planeX_16BE_c : yuv2planeX_16LE_c;
         *yuv2plane1 = isBE(dstFormat) ? yuv2plane1_16BE_c : yuv2plane1_16LE_c;
     } else if (is9_OR_10BPS(dstFormat)) {
-        if (desc->comp[0].depth_minus1 == 8) {
+        if (desc->comp[0].depth == 9) {
             *yuv2planeX = isBE(dstFormat) ? yuv2planeX_9BE_c : yuv2planeX_9LE_c;
             *yuv2plane1 = isBE(dstFormat) ? yuv2plane1_9BE_c : yuv2plane1_9LE_c;
         } else {


@@ -91,7 +91,7 @@ static void hScale16To19_c(SwsContext *c, int16_t *_dst, int dstW,
     int i;
     int32_t *dst = (int32_t *) _dst;
     const uint16_t *src = (const uint16_t *) _src;
-    int bits = desc->comp[0].depth_minus1;
+    int bits = desc->comp[0].depth - 1;
     int sh = bits - 4;

     for (i = 0; i < dstW; i++) {

@@ -114,7 +114,7 @@ static void hScale16To15_c(SwsContext *c, int16_t *dst, int dstW,
     const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(c->srcFormat);
     int i;
     const uint16_t *src = (const uint16_t *) _src;
-    int sh = desc->comp[0].depth_minus1;
+    int sh = desc->comp[0].depth - 1;

     for (i = 0; i < dstW; i++) {
         int j;

@@ -699,8 +699,7 @@ static int swscale(SwsContext *c, const uint8_t *src[],
         if (is9_OR_10BPS(dstFormat)) {
             const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(dstFormat);
             fill_plane9or10(dst[3], dstStride[3], length, height, lastDstY,
-                            255, desc->comp[3].depth_minus1 + 1,
-                            isBE(dstFormat));
+                            255, desc->comp[3].depth, isBE(dstFormat));
         } else
             fillPlane(dst[3], dstStride[3], length, height, lastDstY, 255);
     }


@@ -562,14 +562,14 @@ static av_always_inline int is16BPS(enum AVPixelFormat pix_fmt)
 {
     const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
     av_assert0(desc);
-    return desc->comp[0].depth_minus1 == 15;
+    return desc->comp[0].depth == 16;
 }

 static av_always_inline int is9_OR_10BPS(enum AVPixelFormat pix_fmt)
 {
     const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
     av_assert0(desc);
-    return desc->comp[0].depth_minus1 == 8 || desc->comp[0].depth_minus1 == 9;
+    return desc->comp[0].depth == 9 || desc->comp[0].depth == 10;
 }

 static av_always_inline int isBE(enum AVPixelFormat pix_fmt)


@@ -816,15 +816,15 @@ static int planarCopyWrapper(SwsContext *c, const uint8_t *src[],
             if (is9_OR_10BPS(c->dstFormat)) {
                 fill_plane9or10(dst[plane], dstStride[plane],
                                 length, height, y, val,
-                                desc_dst->comp[plane].depth_minus1 + 1,
+                                desc_dst->comp[plane].depth,
                                 isBE(c->dstFormat));
             } else
                 fillPlane(dst[plane], dstStride[plane], length, height, y,
                           val);
         } else {
             if (is9_OR_10BPS(c->srcFormat)) {
-                const int src_depth = desc_src->comp[plane].depth_minus1 + 1;
-                const int dst_depth = desc_dst->comp[plane].depth_minus1 + 1;
+                const int src_depth = desc_src->comp[plane].depth;
+                const int dst_depth = desc_dst->comp[plane].depth;
                 const uint16_t *srcPtr2 = (const uint16_t *) srcPtr;

                 if (is16BPS(c->dstFormat)) {

@@ -915,7 +915,7 @@ static int planarCopyWrapper(SwsContext *c, const uint8_t *src[],
                     }
                 }
             } else if (is9_OR_10BPS(c->dstFormat)) {
-                const int dst_depth = desc_dst->comp[plane].depth_minus1 + 1;
+                const int dst_depth = desc_dst->comp[plane].depth;
                 uint16_t *dstPtr2 = (uint16_t *) dstPtr;

                 if (is16BPS(c->srcFormat)) {

@@ -1006,7 +1006,7 @@ static int planarCopyWrapper(SwsContext *c, const uint8_t *src[],
             } else {
                 if (is16BPS(c->srcFormat) && is16BPS(c->dstFormat))
                     length *= 2;
-                else if (!desc_src->comp[0].depth_minus1)
+                else if (desc_src->comp[0].depth == 1)
                     length >>= 3; // monowhite/black
                 for (i = 0; i < height; i++) {
                     memcpy(dstPtr, srcPtr, length);

@@ -1087,7 +1087,7 @@ void ff_get_unscaled_swscale(SwsContext *c)
     if (srcFormat == AV_PIX_FMT_GBRP && isPlanar(srcFormat) && isByteRGB(dstFormat))
         c->swscale = planarRgbToRgbWrapper;

-    if (av_pix_fmt_desc_get(srcFormat)->comp[0].depth_minus1 == 7 &&
+    if (av_pix_fmt_desc_get(srcFormat)->comp[0].depth == 8 &&
         isPackedRGB(srcFormat) && dstFormat == AV_PIX_FMT_GBRP)
         c->swscale = rgbToPlanarRgbWrapper;


@@ -1039,10 +1039,10 @@ av_cold int sws_init_context(SwsContext *c, SwsFilter *srcFilter,
         }
     }

-    c->srcBpc = 1 + desc_src->comp[0].depth_minus1;
+    c->srcBpc = desc_src->comp[0].depth;
     if (c->srcBpc < 8)
         c->srcBpc = 8;
-    c->dstBpc = 1 + desc_dst->comp[0].depth_minus1;
+    c->dstBpc = desc_dst->comp[0].depth;
     if (c->dstBpc < 8)
         c->dstBpc = 8;
     if (c->dstBpc == 16)