mpegvideo.c
Go to the documentation of this file.
1 /*
2  * The simplest mpeg encoder (well, it was the simplest!)
3  * Copyright (c) 2000,2001 Fabrice Bellard
4  * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * 4MV & hq & B-frame encoding stuff by Michael Niedermayer <michaelni@gmx.at>
7  *
8  * This file is part of Libav.
9  *
10  * Libav is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Lesser General Public
12  * License as published by the Free Software Foundation; either
13  * version 2.1 of the License, or (at your option) any later version.
14  *
15  * Libav is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18  * Lesser General Public License for more details.
19  *
20  * You should have received a copy of the GNU Lesser General Public
21  * License along with Libav; if not, write to the Free Software
22  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
23  */
24 
30 #include "libavutil/imgutils.h"
31 #include "avcodec.h"
32 #include "dsputil.h"
33 #include "internal.h"
34 #include "mathops.h"
35 #include "mpegvideo.h"
36 #include "mjpegenc.h"
37 #include "msmpeg4.h"
38 #include "xvmc_internal.h"
39 #include "thread.h"
40 #include <limits.h>
41 
42 //#undef NDEBUG
43 //#include <assert.h>
44 
46  DCTELEM *block, int n, int qscale);
48  DCTELEM *block, int n, int qscale);
50  DCTELEM *block, int n, int qscale);
52  DCTELEM *block, int n, int qscale);
54  DCTELEM *block, int n, int qscale);
56  DCTELEM *block, int n, int qscale);
58  DCTELEM *block, int n, int qscale);
59 
60 
61 /* enable all paranoid tests for rounding, overflows, etc... */
62 //#define PARANOID
63 
64 //#define DEBUG
65 
66 
68 // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
69  0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
70  16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31
71 };
72 
74 // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
75  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
76  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
77  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
78  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
79  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
80  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
81  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
82  8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
83 };
84 
/* MPEG-2 intra-DC scale table: constant divisor 4 for every qscale value.
 * Referenced by ff_mpeg2_dc_scale_table[] below — presumably indexed by
 * intra_dc_precision (TODO confirm against upstream; the selector's
 * initializer is elided in this dump). */
85 static const uint8_t mpeg2_dc_scale_table1[128] = {
86 // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
 87  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
 88  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
 89  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
 90  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
 91  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
 92  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
 93  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
 94  4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
 95 };
96 
/* MPEG-2 intra-DC scale table: constant divisor 2 for every qscale value
 * (companion of mpeg2_dc_scale_table1/3; see ff_mpeg2_dc_scale_table[]). */
97 static const uint8_t mpeg2_dc_scale_table2[128] = {
98 // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
 99  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
 100  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
 101  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
 102  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
 103  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
 104  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
 105  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
 106  2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
 107 };
108 
/* MPEG-2 intra-DC scale table: divisor 1, i.e. no DC scaling
 * (companion of mpeg2_dc_scale_table1/2; see ff_mpeg2_dc_scale_table[]). */
109 static const uint8_t mpeg2_dc_scale_table3[128] = {
110 // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
 111  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
 112  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
 113  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
 114  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
 115  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
 116  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
 117  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
 118  1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
 119 };
120 
121 const uint8_t *const ff_mpeg2_dc_scale_table[4] = {
126 };
127 
131 };
132 
134 #if CONFIG_H264_DXVA2_HWACCEL
136 #endif
137 #if CONFIG_H264_VAAPI_HWACCEL
139 #endif
140 #if CONFIG_H264_VDA_HWACCEL
142 #endif
145 };
146 
/* Error-resilience callback: reconstruct/conceal one macroblock.
 * Copies the MV state handed in by the ER core into the context, recomputes
 * the destination pointers for MB (mb_x, mb_y) in the current picture and
 * calls ff_MPV_decode_mb(). Only ref == 0 is supported (asserted).
 * NOTE(review): doxygen source lines 160-163 are elided in this dump, so a
 * small part of the body (likely block-index setup) is not visible here. */
147 static void mpeg_er_decode_mb(void *opaque, int ref, int mv_dir, int mv_type,
 148  int (*mv)[2][4][2],
 149  int mb_x, int mb_y, int mb_intra, int mb_skipped)
 150 {
 151  MpegEncContext *s = opaque;
 152 
 153  s->mv_dir = mv_dir;
 154  s->mv_type = mv_type;
 155  s->mb_intra = mb_intra;
 156  s->mb_skipped = mb_skipped;
 157  s->mb_x = mb_x;
 158  s->mb_y = mb_y;
 159  memcpy(s->mv, mv, sizeof(*mv));
 160 
 163 
 164  s->dsp.clear_blocks(s->block[0]);
 165 
 /* Destination pointers: luma at 16x16 granularity, chroma scaled by the
  * context's chroma shifts. */
 166  s->dest[0] = s->current_picture.f.data[0] + (s->mb_y * 16 * s->linesize) + s->mb_x * 16;
 167  s->dest[1] = s->current_picture.f.data[1] + (s->mb_y * (16 >> s->chroma_y_shift) * s->uvlinesize) + s->mb_x * (16 >> s->chroma_x_shift);
 168  s->dest[2] = s->current_picture.f.data[2] + (s->mb_y * (16 >> s->chroma_y_shift) * s->uvlinesize) + s->mb_x * (16 >> s->chroma_x_shift);
 169 
 170  assert(ref == 0);
 171  ff_MPV_decode_mb(s, s->block);
 172 }
173 
/* Scan [p, end) for an MPEG start code (00 00 01 xx), maintaining *state as a
 * rolling big-endian 32-bit window so a start code split across calls is
 * still found. Returns a pointer just past the start code, or end if none.
 * NOTE(review): the first signature line (return type, name and first
 * parameter, presumably "const uint8_t *...(const uint8_t *p,") is elided
 * in this dump — confirm against the upstream source. */
 175  const uint8_t *end,
 176  uint32_t * restrict state)
 177 {
 178  int i;
 179 
 180  assert(p <= end);
 181  if (p >= end)
 182  return end;
 183 
 /* Warm up the rolling state with up to 3 bytes so codes straddling the
  * previous buffer boundary are detected. */
 184  for (i = 0; i < 3; i++) {
 185  uint32_t tmp = *state << 8;
 186  *state = tmp + *(p++);
 187  if (tmp == 0x100 || p == end)
 188  return p;
 189  }
 190 
 /* Fast skip: examine every 3rd byte and step back only when the bytes
  * could be part of a 00 00 01 pattern. */
 191  while (p < end) {
 192  if (p[-1] > 1 ) p += 3;
 193  else if (p[-2] ) p += 2;
 194  else if (p[-3]|(p[-1]-1)) p++;
 195  else {
 196  p++;
 197  break;
 198  }
 199  }
 200 
 /* Reload the rolling state with the last 4 bytes consumed. */
 201  p = FFMIN(p, end) - 4;
 202  *state = AV_RB32(p);
 203 
 204  return p + 4;
 205 }
206 
207 /* init common dct for both encoder and decoder */
/* Common DCT/DSP setup shared by encoder and decoder: initializes the DSP
 * context, selects arch-specific implementations, then loads the permuted
 * scantables (alternate vs. zigzag scan). Returns 0.
 * NOTE(review): the signature line and several statement lines (doxygen
 * 211-220, 223-231, 238-245) are elided in this dump. */
209 {
 210  ff_dsputil_init(&s->dsp, s->avctx);
 212 
 218  if (s->flags & CODEC_FLAG_BITEXACT)
 221 
 /* Per-architecture DCT init; the called function names are elided. */
 222 #if ARCH_X86
 224 #elif ARCH_ALPHA
 226 #elif ARCH_ARM
 228 #elif HAVE_ALTIVEC
 230 #elif ARCH_BFIN
 232 #endif
 233 
 234  /* load & permutate scantables
 235  * note: only wmv uses different ones
 236  */
 237  if (s->alternate_scan) {
 240  } else {
 243  }
 246 
 247  return 0;
 248 }
249 
/* Shallow-copy one Picture and tag the copy as FF_BUFFER_TYPE_COPY so the
 * duplicate is never treated as the buffer owner.
 * NOTE(review): signature elided — presumably
 * static void copy_picture(Picture *dst, Picture *src); confirm upstream. */
251 {
 252  *dst = *src;
 253  dst->f.type = FF_BUFFER_TYPE_COPY;
 254 }
255 
/* Release the data buffer of a Picture, going through the thread-aware
 * release path unless the codec is one of the WMV3/VC1 image codecs (whose
 * internal buffers bypass user get/release_buffer callbacks).
 * NOTE(review): signature and the condition's second operand plus the else
 * branch body (doxygen 264-265, 268-269) are elided in this dump. */
260 {
 261  /* WM Image / Screen codecs allocate internal buffers with different
 262  * dimensions / colorspaces; ignore user-defined callbacks for these. */
 263  if (s->codec_id != AV_CODEC_ID_WMV3IMAGE &&
 266  ff_thread_release_buffer(s->avctx, &pic->f);
 267  else
 270 }
271 
/* Allocate the linesize-dependent scratch buffers (edge emulation buffer and
 * the shared ME/RD/B/OBMC scratchpad). The scratchpads alias one region;
 * obmc_scratchpad points 16 bytes in. Returns 0 or AVERROR(ENOMEM).
 * NOTE(review): signature elided — presumably takes (MpegEncContext *s,
 * int linesize); the free call on the fail path (doxygen 293) is elided. */
273 {
 /* Worst-case row size: |linesize| plus 32 bytes slack, 32-byte aligned. */
 274  int alloc_size = FFALIGN(FFABS(linesize) + 32, 32);
 275 
 276  // edge emu needs blocksize + filter length - 1
 277  // (= 17x17 for halfpel / 21x21 for h264)
 278  // VC1 computes luma and chroma simultaneously and needs 19X19 + 9x9
 279  // at uvlinesize. It supports only YUV420 so 24x24 is enough
 280  // linesize * interlaced * MBsize
 281  FF_ALLOCZ_OR_GOTO(s->avctx, s->edge_emu_buffer, alloc_size * 2 * 24,
 282  fail);
 283 
 284  FF_ALLOCZ_OR_GOTO(s->avctx, s->me.scratchpad, alloc_size * 2 * 16 * 3,
 285  fail)
 /* All scratch pointers share the same allocation. */
 286  s->me.temp = s->me.scratchpad;
 287  s->rd_scratchpad = s->me.scratchpad;
 288  s->b_scratchpad = s->me.scratchpad;
 289  s->obmc_scratchpad = s->me.scratchpad + 16;
 290 
 291  return 0;
 292 fail:
 294  return AVERROR(ENOMEM);
 295 }
296 
/* Allocate a frame buffer for *pic via the codec's get_buffer path (thread
 * aware unless WMV3/VC1 image codecs), allocate hwaccel private data when
 * needed, and validate the returned strides. On any validation failure the
 * buffer is released again. Returns 0, -1, or a negative AVERROR.
 * NOTE(review): signature and several lines (doxygen 307, 316-317, 325,
 * 331, 338, 346) are elided in this dump. */
301 {
 302  int r, ret;
 303 
 /* Hardware acceleration: allocate per-picture private data if the
  * hwaccel declares a non-zero size (allocation call elided). */
 304  if (s->avctx->hwaccel) {
 305  assert(!pic->f.hwaccel_picture_private);
 306  if (s->avctx->hwaccel->priv_data_size) {
 308  if (!pic->f.hwaccel_picture_private) {
 309  av_log(s->avctx, AV_LOG_ERROR, "alloc_frame_buffer() failed (hwaccel private data allocation)\n");
 310  return -1;
 311  }
 312  }
 313  }
 314 
 315  if (s->codec_id != AV_CODEC_ID_WMV3IMAGE &&
 318  r = ff_thread_get_buffer(s->avctx, &pic->f);
 319  else
 320  r = avcodec_default_get_buffer(s->avctx, &pic->f);
 321 
 322  if (r < 0 || !pic->f.type || !pic->f.data[0]) {
 323  av_log(s->avctx, AV_LOG_ERROR, "get_buffer() failed (%d %d %p)\n",
 324  r, pic->f.type, pic->f.data[0]);
 326  return -1;
 327  }
 328 
 /* A stride change between frames is not supported by this codepath. */
 329  if (s->linesize && (s->linesize != pic->f.linesize[0] ||
 330  s->uvlinesize != pic->f.linesize[1])) {
 332  "get_buffer() failed (stride changed)\n");
 333  free_frame_buffer(s, pic);
 334  return -1;
 335  }
 336 
 /* U and V planes must share one stride. */
 337  if (pic->f.linesize[1] != pic->f.linesize[2]) {
 339  "get_buffer() failed (uv stride mismatch)\n");
 340  free_frame_buffer(s, pic);
 341  return -1;
 342  }
 343 
 /* Lazily allocate the linesize-dependent scratch buffers. */
 344  if (!s->edge_emu_buffer &&
 345  (ret = ff_mpv_frame_size_alloc(s, pic->f.linesize[0])) < 0) {
 347  "get_buffer() failed to allocate context scratch buffers.\n");
 348  free_frame_buffer(s, pic);
 349  return ret;
 350  }
 351 
 352  return 0;
 353 }
354 
/* Allocate a Picture and all its per-macroblock side-data tables (qscale,
 * mb_type, motion vectors, ref indices; encoder-only variance tables;
 * optional DCT coefficient dump). If 'shared' the frame data must already
 * be present; otherwise a frame buffer is allocated and the context strides
 * are taken from it. Returns 0 on success, -1 on failure.
 * NOTE(review): many allocation target lines (doxygen 374, 389, 391, 395,
 * 397, 400, 407, 418, 428, 432) are elided in this dump. */
359 int ff_alloc_picture(MpegEncContext *s, Picture *pic, int shared)
 360 {
 361  const int big_mb_num = s->mb_stride * (s->mb_height + 1) + 1;
 362 
 363  // the + 1 is needed so memset(,,stride*height) does not sig11
 364 
 365  const int mb_array_size = s->mb_stride * s->mb_height;
 366  const int b8_array_size = s->b8_stride * s->mb_height * 2;
 367  const int b4_array_size = s->b4_stride * s->mb_height * 4;
 368  int i;
 369  int r = -1;
 370 
 371  if (shared) {
 372  assert(pic->f.data[0]);
 373  assert(pic->f.type == 0 || pic->f.type == FF_BUFFER_TYPE_SHARED);
 375  } else {
 376  assert(!pic->f.data[0]);
 377 
 378  if (alloc_frame_buffer(s, pic) < 0)
 379  return -1;
 380 
 381  s->linesize = pic->f.linesize[0];
 382  s->uvlinesize = pic->f.linesize[1];
 383  }
 384 
 /* Tables are only allocated once per Picture slot. */
 385  if (pic->f.qscale_table == NULL) {
 386  if (s->encoding) {
 387  FF_ALLOCZ_OR_GOTO(s->avctx, pic->mb_var,
 388  mb_array_size * sizeof(int16_t), fail)
 390  mb_array_size * sizeof(int16_t), fail)
 392  mb_array_size * sizeof(int8_t ), fail)
 393  }
 394 
 396  mb_array_size * sizeof(uint8_t) + 2, fail)// the + 2 is for the slice end check
 398  (big_mb_num + s->mb_stride) * sizeof(uint8_t),
 399  fail)
 401  (big_mb_num + s->mb_stride) * sizeof(uint32_t),
 402  fail)
 /* Point the public tables past the top/left guard band. */
 403  pic->f.mb_type = pic->mb_type_base + 2 * s->mb_stride + 1;
 404  pic->f.qscale_table = pic->qscale_table_base + 2 * s->mb_stride + 1;
 /* H.264 uses 4x4 motion granularity (subsample_log2 == 2)... */
 405  if (s->out_format == FMT_H264) {
 406  for (i = 0; i < 2; i++) {
 408  2 * (b4_array_size + 4) * sizeof(int16_t),
 409  fail)
 410  pic->f.motion_val[i] = pic->motion_val_base[i] + 4;
 411  FF_ALLOCZ_OR_GOTO(s->avctx, pic->f.ref_index[i],
 412  4 * mb_array_size * sizeof(uint8_t), fail)
 413  }
 414  pic->f.motion_subsample_log2 = 2;
 /* ...other codecs use 8x8 granularity (subsample_log2 == 3). */
 415  } else if (s->out_format == FMT_H263 || s->encoding ||
 416  (s->avctx->debug & FF_DEBUG_MV) || s->avctx->debug_mv) {
 417  for (i = 0; i < 2; i++) {
 419  2 * (b8_array_size + 4) * sizeof(int16_t),
 420  fail)
 421  pic->f.motion_val[i] = pic->motion_val_base[i] + 4;
 422  FF_ALLOCZ_OR_GOTO(s->avctx, pic->f.ref_index[i],
 423  4 * mb_array_size * sizeof(uint8_t), fail)
 424  }
 425  pic->f.motion_subsample_log2 = 3;
 426  }
 427  if (s->avctx->debug&FF_DEBUG_DCT_COEFF) {
 429  64 * mb_array_size * sizeof(DCTELEM) * 6, fail)
 430  }
 431  pic->f.qstride = s->mb_stride;
 433  1 * sizeof(AVPanScan), fail)
 434  }
 435 
 436  pic->owner2 = s;
 437 
 438  return 0;
 439 fail: // for the FF_ALLOCZ_OR_GOTO macro
 440  if (r >= 0)
 441  free_frame_buffer(s, pic);
 442  return -1;
 443 }
444 
/* Free a Picture: release the frame buffer (unless the data is shared) and
 * free every side-data table allocated in ff_alloc_picture(). For shared
 * pictures only the data/base pointers are cleared.
 * NOTE(review): doxygen line 460 is elided (presumably the free of
 * qscale_table_base, given the following NULL assignment); the second
 * "pic->f.mb_type = NULL;" looks redundant and may stand in for a
 * different elided statement — confirm upstream. */
448 static void free_picture(MpegEncContext *s, Picture *pic)
 449 {
 450  int i;
 451 
 452  if (pic->f.data[0] && pic->f.type != FF_BUFFER_TYPE_SHARED) {
 453  free_frame_buffer(s, pic);
 454  }
 455 
 456  av_freep(&pic->mb_var);
 457  av_freep(&pic->mc_mb_var);
 458  av_freep(&pic->mb_mean);
 459  av_freep(&pic->f.mbskip_table);
 461  pic->f.qscale_table = NULL;
 462  av_freep(&pic->mb_type_base);
 463  pic->f.mb_type = NULL;
 464  av_freep(&pic->f.dct_coeff);
 465  av_freep(&pic->f.pan_scan);
 466  pic->f.mb_type = NULL;
 467  for (i = 0; i < 2; i++) {
 468  av_freep(&pic->motion_val_base[i]);
 469  av_freep(&pic->f.ref_index[i]);
 470  pic->f.motion_val[i] = NULL;
 471  }
 472 
 /* Shared pictures do not own their buffers: just drop the pointers. */
 473  if (pic->f.type == FF_BUFFER_TYPE_SHARED) {
 474  for (i = 0; i < 4; i++) {
 475  pic->f.base[i] =
 476  pic->f.data[i] = NULL;
 477  }
 478  pic->f.type = 0;
 479  }
 480 }
481 
/* Per-slice-context initialization: reset the scratch pointers (allocated
 * lazily by ff_mpv_frame_size_alloc), allocate encoder-only ME maps and
 * noise-reduction tables, the 12 DCT block buffers, and — for H.263-family
 * output — the AC prediction value planes. Returns 0 or -1 (caller frees
 * via ff_MPV_common_end()).
 * NOTE(review): signature and some allocation targets (doxygen 499, 502,
 * 515) are elided in this dump. */
483 {
 484  int y_size = s->b8_stride * (2 * s->mb_height + 1);
 485  int c_size = s->mb_stride * (s->mb_height + 1);
 486  int yc_size = y_size + 2 * c_size;
 487  int i;
 488 
 /* Scratch buffers are allocated later, once the linesize is known. */
 489  s->edge_emu_buffer =
 490  s->me.scratchpad =
 491  s->me.temp =
 492  s->rd_scratchpad =
 493  s->b_scratchpad =
 494  s->obmc_scratchpad = NULL;
 495 
 496  if (s->encoding) {
 497  FF_ALLOCZ_OR_GOTO(s->avctx, s->me.map,
 498  ME_MAP_SIZE * sizeof(uint32_t), fail)
 500  ME_MAP_SIZE * sizeof(uint32_t), fail)
 501  if (s->avctx->noise_reduction) {
 503  2 * 64 * sizeof(int), fail)
 504  }
 505  }
 506  FF_ALLOCZ_OR_GOTO(s->avctx, s->blocks, 64 * 12 * 2 * sizeof(DCTELEM), fail)
 507  s->block = s->blocks[0];
 508 
 509  for (i = 0; i < 12; i++) {
 510  s->pblocks[i] = &s->block[i];
 511  }
 512 
 513  if (s->out_format == FMT_H263) {
 514  /* ac values */
 516  yc_size * sizeof(int16_t) * 16, fail);
 /* Y plane, then the two chroma planes, each offset past the guard row. */
 517  s->ac_val[0] = s->ac_val_base + s->b8_stride + 1;
 518  s->ac_val[1] = s->ac_val_base + y_size + s->mb_stride + 1;
 519  s->ac_val[2] = s->ac_val[1] + c_size;
 520  }
 521 
 522  return 0;
 523 fail:
 524  return -1; // free() through ff_MPV_common_end()
 525 }
526 
/* Free everything init_duplicate_context()/ff_mpv_frame_size_alloc()
 * allocated for one slice context. Aliased scratch pointers are only
 * NULLed (they share me.scratchpad's allocation). NULL-safe.
 * NOTE(review): signature and doxygen line 532 (presumably the
 * edge_emu_buffer free) are elided in this dump. */
528 {
 529  if (s == NULL)
 530  return;
 531 
 533  av_freep(&s->me.scratchpad);
 534  s->me.temp =
 535  s->rd_scratchpad =
 536  s->b_scratchpad =
 537  s->obmc_scratchpad = NULL;
 538 
 539  av_freep(&s->dct_error_sum);
 540  av_freep(&s->me.map);
 541  av_freep(&s->me.score_map);
 542  av_freep(&s->blocks);
 543  av_freep(&s->ac_val_base);
 544  s->block = NULL;
 545 }
546 
/* Copy the per-thread (slice-local) fields from src into bak, so a wholesale
 * memcpy of the context can be undone for these members — see
 * ff_update_duplicate_context() below.
 * NOTE(review): signature elided — presumably
 * static void backup_duplicate_context(MpegEncContext *bak,
 * MpegEncContext *src); confirm upstream. */
548 {
 549 #define COPY(a) bak->a = src->a
 550  COPY(edge_emu_buffer);
 551  COPY(me.scratchpad);
 552  COPY(me.temp);
 553  COPY(rd_scratchpad);
 554  COPY(b_scratchpad);
 555  COPY(obmc_scratchpad);
 556  COPY(me.map);
 557  COPY(me.score_map);
 558  COPY(blocks);
 559  COPY(block);
 560  COPY(start_mb_y);
 561  COPY(end_mb_y);
 562  COPY(me.map_generation);
 563  COPY(pb);
 564  COPY(dct_error_sum);
 565  COPY(dct_count[0]);
 566  COPY(dct_count[1]);
 567  COPY(ac_val_base);
 568  COPY(ac_val[0]);
 569  COPY(ac_val[1]);
 570  COPY(ac_val[2]);
 571 #undef COPY
 572 }
573 
/* Refresh a slice context from the master: save dst's thread-local fields,
 * memcpy the whole master context over dst, restore the saved fields, and
 * re-point the pblocks array at dst's own block storage. Lazily allocates
 * the scratch buffers if dst has none yet. Returns 0 or a negative error.
 * NOTE(review): signature elided — presumably
 * int ff_update_duplicate_context(MpegEncContext *dst, MpegEncContext *src). */
575 {
 576  MpegEncContext bak;
 577  int i, ret;
 578  // FIXME copy only needed parts
 579  // START_TIMER
 580  backup_duplicate_context(&bak, dst);
 581  memcpy(dst, src, sizeof(MpegEncContext));
 582  backup_duplicate_context(dst, &bak);
 /* pblocks must reference dst's blocks, not the master's. */
 583  for (i = 0; i < 12; i++) {
 584  dst->pblocks[i] = &dst->block[i];
 585  }
 586  if (!dst->edge_emu_buffer &&
 587  (ret = ff_mpv_frame_size_alloc(dst, dst->linesize)) < 0) {
 588  av_log(dst->avctx, AV_LOG_ERROR, "failed to allocate context "
 589  "scratch buffers.\n");
 590  return ret;
 591  }
 592  // STOP_TIMER("update_duplicate_context")
 593  // about 10k cycles / 0.01 sec for 1000frames on 1ghz with 2 threads
 594  return 0;
 595 }
596 
/* Frame-threading support: synchronize the destination thread's context from
 * the source thread after a frame is decoded. Handles first-time full copy,
 * resolution changes, picture-table duplication (with pointer rebasing via
 * REBASE_PICTURE), MPEG-4 timing/B-frame/interlacing state, and the carried
 * bitstream buffer. Returns 0 or a negative error.
 * NOTE(review): several lines are elided in this dump (doxygen 597: first
 * signature line; 612-617: per-thread pointer resets; 668-670, 676:
 * bitstream buffer reallocation and padding size). */
 598  const AVCodecContext *src)
 599 {
 600  int i;
 601  MpegEncContext *s = dst->priv_data, *s1 = src->priv_data;
 602 
 603  if (dst == src || !s1->context_initialized)
 604  return 0;
 605 
 606  // FIXME can parameters change on I-frames?
 607  // in that case dst may need a reinit
 608  if (!s->context_initialized) {
 609  memcpy(s, s1, sizeof(MpegEncContext));
 610 
 611  s->avctx = dst;
 614  s->bitstream_buffer = NULL;
 616 
 618  }
 619 
 /* Resolution change (or explicit reinit request): rebuild frame tables. */
 620  if (s->height != s1->height || s->width != s1->width || s->context_reinit) {
 621  int err;
 622  s->context_reinit = 0;
 623  s->height = s1->height;
 624  s->width = s1->width;
 625  if ((err = ff_MPV_common_frame_size_change(s)) < 0)
 626  return err;
 627  }
 628 
 629  s->avctx->coded_height = s1->avctx->coded_height;
 630  s->avctx->coded_width = s1->avctx->coded_width;
 631  s->avctx->width = s1->avctx->width;
 632  s->avctx->height = s1->avctx->height;
 633 
 634  s->coded_picture_number = s1->coded_picture_number;
 635  s->picture_number = s1->picture_number;
 636  s->input_picture_number = s1->input_picture_number;
 637 
 /* Copy the whole picture array plus the struct-layout-dependent span of
  * picture-related members (relies on member ordering in MpegEncContext). */
 638  memcpy(s->picture, s1->picture, s1->picture_count * sizeof(Picture));
 639  memcpy(&s->last_picture, &s1->last_picture,
 640  (char *) &s1->last_picture_ptr - (char *) &s1->last_picture);
 641 
 642  // reset s->picture[].f.extended_data to s->picture[].f.data
 643  for (i = 0; i < s->picture_count; i++)
 644  s->picture[i].f.extended_data = s->picture[i].f.data;
 645 
 /* Translate the source thread's picture pointers into this context. */
 646  s->last_picture_ptr = REBASE_PICTURE(s1->last_picture_ptr, s, s1);
 647  s->current_picture_ptr = REBASE_PICTURE(s1->current_picture_ptr, s, s1);
 648  s->next_picture_ptr = REBASE_PICTURE(s1->next_picture_ptr, s, s1);
 649 
 650  // Error/bug resilience
 651  s->next_p_frame_damaged = s1->next_p_frame_damaged;
 652  s->workaround_bugs = s1->workaround_bugs;
 653 
 654  // MPEG4 timing info
 655  memcpy(&s->time_increment_bits, &s1->time_increment_bits,
 656  (char *) &s1->shape - (char *) &s1->time_increment_bits);
 657 
 658  // B-frame info
 659  s->max_b_frames = s1->max_b_frames;
 660  s->low_delay = s1->low_delay;
 661  s->droppable = s1->droppable;
 662 
 663  // DivX handling (doesn't work)
 664  s->divx_packed = s1->divx_packed;
 665 
 /* Carry over any buffered bitstream bytes (reallocation lines elided). */
 666  if (s1->bitstream_buffer) {
 667  if (s1->bitstream_buffer_size +
 671  s1->allocated_bitstream_buffer_size);
 672  s->bitstream_buffer_size = s1->bitstream_buffer_size;
 673  memcpy(s->bitstream_buffer, s1->bitstream_buffer,
 674  s1->bitstream_buffer_size);
 675  memset(s->bitstream_buffer + s->bitstream_buffer_size, 0,
 677  }
 678 
 679  // linesize dependend scratch buffer allocation
 680  if (!s->edge_emu_buffer)
 681  if (s1->linesize) {
 682  if (ff_mpv_frame_size_alloc(s, s1->linesize) < 0) {
 683  av_log(s->avctx, AV_LOG_ERROR, "Failed to allocate context "
 684  "scratch buffers.\n");
 685  return AVERROR(ENOMEM);
 686  }
 687  } else {
 688  av_log(s->avctx, AV_LOG_ERROR, "Context scratch buffers could not "
 689  "be allocated due to unknown size.\n");
 690  return AVERROR_BUG;
 691  }
 692 
 693  // MPEG2/interlacing info
 694  memcpy(&s->progressive_sequence, &s1->progressive_sequence,
 695  (char *) &s1->rtp_mode - (char *) &s1->progressive_sequence);
 696 
 697  if (!s1->first_field) {
 698  s->last_pict_type = s1->pict_type;
 699  if (s1->current_picture_ptr)
 700  s->last_lambda_for[s1->pict_type] = s1->current_picture_ptr->f.quality;
 701 
 702  if (s1->pict_type != AV_PICTURE_TYPE_B) {
 703  s->last_non_b_pict_type = s1->pict_type;
 704  }
 705  }
 706 
 707  return 0;
 708 }
709 
/* Set MpegEncContext defaults shared by encoder and decoder: DC scale
 * tables, progressive flags, picture counters, f/b codes and the slice
 * context count.
 * NOTE(review): signature and a few assignment lines (doxygen 719-720, 723,
 * 735) are elided in this dump — the y_dc_scale_table chain's right-hand
 * side is among them. */
717 {
 718  s->y_dc_scale_table =
 721  s->progressive_frame = 1;
 722  s->progressive_sequence = 1;
 724 
 725  s->coded_picture_number = 0;
 726  s->picture_number = 0;
 727  s->input_picture_number = 0;
 728 
 729  s->picture_in_gop_number = 0;
 730 
 731  s->f_code = 1;
 732  s->b_code = 1;
 733 
 734  s->picture_range_start = 0;
 736 
 737  s->slice_context_count = 1;
 738 }
739 
/* Decoder defaults entry point. NOTE(review): signature (doxygen 745) and
 * the single body line (747 — presumably a call to ff_MPV_common_defaults)
 * are elided in this dump; confirm upstream. */
746 {
 748 }
749 
/* Initialize the error-resilience context: mirror geometry and shared
 * tables from the codec context, allocate the ER status/temp buffers, and
 * register this context as the ER opaque pointer. Returns 0 or
 * AVERROR(ENOMEM) (both buffers freed on failure).
 * NOTE(review): doxygen lines 766 (er_temp_buffer allocation), 777
 * (presumably the decode_mb callback assignment — mpeg_er_decode_mb above)
 * and 783 (second free on the fail path) are elided in this dump. */
750 static int init_er(MpegEncContext *s)
 751 {
 752  ERContext *er = &s->er;
 753  int mb_array_size = s->mb_height * s->mb_stride;
 754  int i;
 755 
 756  er->avctx = s->avctx;
 757  er->dsp = &s->dsp;
 758 
 759  er->mb_index2xy = s->mb_index2xy;
 760  er->mb_num = s->mb_num;
 761  er->mb_width = s->mb_width;
 762  er->mb_height = s->mb_height;
 763  er->mb_stride = s->mb_stride;
 764  er->b8_stride = s->b8_stride;
 765 
 767  er->error_status_table = av_mallocz(mb_array_size);
 768  if (!er->er_temp_buffer || !er->error_status_table)
 769  goto fail;
 770 
 771  er->mbskip_table = s->mbskip_table;
 772  er->mbintra_table = s->mbintra_table;
 773 
 774  for (i = 0; i < FF_ARRAY_ELEMS(s->dc_val); i++)
 775  er->dc_val[i] = s->dc_val[i];
 776 
 778  er->opaque = s;
 779 
 780  return 0;
 781 fail:
 782  av_freep(&er->er_temp_buffer);
 784  return AVERROR(ENOMEM);
 785 }
786 
/* Allocate and initialize all frame-size-dependent tables of the context:
 * macroblock geometry/strides, mb_index2xy, encoder MV/MB-type/lambda
 * tables, interlaced direct-mode tables (MPEG-4/MPEG-2), H.263 cbp tables,
 * DC prediction planes, mbintra/mbskip tables and the optional MV
 * visualization buffers; finally initializes the ER context. Returns
 * init_er()'s result or AVERROR(ENOMEM).
 * NOTE(review): signature and many allocation target lines (doxygen 830-849,
 * 858-860, 866, 872, 879, 881, 886, 896-898, 905, 922) are elided in this
 * dump, so several FF_ALLOCZ_OR_GOTO destinations are not visible. */
791 {
 792  int y_size, c_size, yc_size, i, mb_array_size, mv_table_size, x, y;
 793 
 /* Geometry: strides include one extra column as a guard band. */
 794  s->mb_width = (s->width + 15) / 16;
 795  s->mb_stride = s->mb_width + 1;
 796  s->b8_stride = s->mb_width * 2 + 1;
 797  s->b4_stride = s->mb_width * 4 + 1;
 798  mb_array_size = s->mb_height * s->mb_stride;
 799  mv_table_size = (s->mb_height + 2) * s->mb_stride + 1;
 800 
 801  /* set default edge pos, will be overriden
 802  * in decode_header if needed */
 803  s->h_edge_pos = s->mb_width * 16;
 804  s->v_edge_pos = s->mb_height * 16;
 805 
 806  s->mb_num = s->mb_width * s->mb_height;
 807 
 808  s->block_wrap[0] =
 809  s->block_wrap[1] =
 810  s->block_wrap[2] =
 811  s->block_wrap[3] = s->b8_stride;
 812  s->block_wrap[4] =
 813  s->block_wrap[5] = s->mb_stride;
 814 
 815  y_size = s->b8_stride * (2 * s->mb_height + 1);
 816  c_size = s->mb_stride * (s->mb_height + 1);
 817  yc_size = y_size + 2 * c_size;
 818 
 /* Map linear MB index -> position in the strided MB array. */
 819  FF_ALLOCZ_OR_GOTO(s->avctx, s->mb_index2xy, (s->mb_num + 1) * sizeof(int),
 820  fail); // error ressilience code looks cleaner with this
 821  for (y = 0; y < s->mb_height; y++)
 822  for (x = 0; x < s->mb_width; x++)
 823  s->mb_index2xy[x + y * s->mb_width] = x + y * s->mb_stride;
 824 
 825  s->mb_index2xy[s->mb_height * s->mb_width] =
 826  (s->mb_height - 1) * s->mb_stride + s->mb_width; // FIXME really needed?
 827 
 828  if (s->encoding) {
 829  /* Allocate MV tables */
 831  mv_table_size * 2 * sizeof(int16_t), fail);
 833  mv_table_size * 2 * sizeof(int16_t), fail);
 835  mv_table_size * 2 * sizeof(int16_t), fail);
 837  mv_table_size * 2 * sizeof(int16_t), fail);
 839  mv_table_size * 2 * sizeof(int16_t), fail);
 841  mv_table_size * 2 * sizeof(int16_t), fail);
 /* Public table pointers skip the guard row/column of the base arrays. */
 842  s->p_mv_table = s->p_mv_table_base + s->mb_stride + 1;
 846  s->mb_stride + 1;
 848  s->mb_stride + 1;
 850 
 851  /* Allocate MB type table */
 852  FF_ALLOCZ_OR_GOTO(s->avctx, s->mb_type, mb_array_size *
 853  sizeof(uint16_t), fail); // needed for encoding
 854 
 855  FF_ALLOCZ_OR_GOTO(s->avctx, s->lambda_table, mb_array_size *
 856  sizeof(int), fail);
 857 
 859  mb_array_size * sizeof(float), fail);
 861  mb_array_size * sizeof(float), fail);
 862 
 863  }
 864 
 865  if (s->codec_id == AV_CODEC_ID_MPEG4 ||
 867  /* interlaced direct mode decoding tables */
 868  for (i = 0; i < 2; i++) {
 869  int j, k;
 870  for (j = 0; j < 2; j++) {
 871  for (k = 0; k < 2; k++) {
 873  s->b_field_mv_table_base[i][j][k],
 874  mv_table_size * 2 * sizeof(int16_t),
 875  fail);
 876  s->b_field_mv_table[i][j][k] = s->b_field_mv_table_base[i][j][k] +
 877  s->mb_stride + 1;
 878  }
 880  mb_array_size * 2 * sizeof(uint8_t), fail);
 882  mv_table_size * 2 * sizeof(int16_t), fail);
 883  s->p_field_mv_table[i][j] = s->p_field_mv_table_base[i][j]
 884  + s->mb_stride + 1;
 885  }
 887  mb_array_size * 2 * sizeof(uint8_t), fail);
 888  }
 889  }
 890  if (s->out_format == FMT_H263) {
 891  /* cbp values */
 892  FF_ALLOCZ_OR_GOTO(s->avctx, s->coded_block_base, y_size, fail);
 893  s->coded_block = s->coded_block_base + s->b8_stride + 1;
 894 
 895  /* cbp, ac_pred, pred_dir */
 897  mb_array_size * sizeof(uint8_t), fail);
 899  mb_array_size * sizeof(uint8_t), fail);
 900  }
 901 
 902  if (s->h263_pred || s->h263_plus || !s->encoding) {
 903  /* dc values */
 904  // MN: we need these for error resilience of intra-frames
 906  yc_size * sizeof(int16_t), fail);
 907  s->dc_val[0] = s->dc_val_base + s->b8_stride + 1;
 908  s->dc_val[1] = s->dc_val_base + y_size + s->mb_stride + 1;
 909  s->dc_val[2] = s->dc_val[1] + c_size;
 /* 1024 is the neutral DC predictor (128 << 3). */
 910  for (i = 0; i < yc_size; i++)
 911  s->dc_val_base[i] = 1024;
 912  }
 913 
 914  /* which mb is a intra block */
 915  FF_ALLOCZ_OR_GOTO(s->avctx, s->mbintra_table, mb_array_size, fail);
 916  memset(s->mbintra_table, 1, mb_array_size);
 917 
 918  /* init macroblock skip table */
 919  FF_ALLOCZ_OR_GOTO(s->avctx, s->mbskip_table, mb_array_size + 2, fail);
 920  // Note the + 1 is for a quicker mpeg4 slice_end detection
 921 
 923  s->avctx->debug_mv) {
 924  s->visualization_buffer[0] = av_malloc((s->mb_width * 16 +
 925  2 * EDGE_WIDTH) * s->mb_height * 16 + 2 * EDGE_WIDTH);
 926  s->visualization_buffer[1] = av_malloc((s->mb_width * 16 +
 927  2 * EDGE_WIDTH) * s->mb_height * 16 + 2 * EDGE_WIDTH);
 928  s->visualization_buffer[2] = av_malloc((s->mb_width * 16 +
 929  2 * EDGE_WIDTH) * s->mb_height * 16 + 2 * EDGE_WIDTH);
 930  }
 931 
 932  return init_er(s);
 933 fail:
 934  return AVERROR(ENOMEM);
 935 }
936 
/* Common init for encoder and decoder: validate dimensions and pixel
 * format, clamp the slice/thread count to the MB height, allocate
 * encoder-only matrices and picture lists, the picture pool, the
 * frame-size-dependent tables (init_context_frame) and the per-slice
 * duplicate contexts. Returns 0 on success, -1 on failure (with full
 * cleanup via ff_MPV_common_end()).
 * NOTE(review): signature and a number of lines (doxygen 945, 951, 957,
 * 977, 983, 988, 990, 993, 997, 1003-1027) are elided in this dump,
 * including most encoder allocation targets. */
942 {
 943  int i;
 944  int nb_slices = (HAVE_THREADS &&
 946  s->avctx->thread_count : 1;
 947 
 948  if (s->encoding && s->avctx->slices)
 949  nb_slices = s->avctx->slices;
 950 
 /* Field pictures (condition elided) need an even MB height. */
 952  s->mb_height = (s->height + 31) / 32 * 2;
 953  else if (s->codec_id != AV_CODEC_ID_H264)
 954  s->mb_height = (s->height + 15) / 16;
 955 
 956  if (s->avctx->pix_fmt == AV_PIX_FMT_NONE) {
 958  "decoding to AV_PIX_FMT_NONE is not supported.\n");
 959  return -1;
 960  }
 961 
 /* One slice context per MB row at most. */
 962  if (nb_slices > MAX_THREADS || (nb_slices > s->mb_height && s->mb_height)) {
 963  int max_slices;
 964  if (s->mb_height)
 965  max_slices = FFMIN(MAX_THREADS, s->mb_height);
 966  else
 967  max_slices = MAX_THREADS;
 968  av_log(s->avctx, AV_LOG_WARNING, "too many threads/slices (%d),"
 969  " reducing to %d\n", nb_slices, max_slices);
 970  nb_slices = max_slices;
 971  }
 972 
 973  if ((s->width || s->height) &&
 974  av_image_check_size(s->width, s->height, 0, s->avctx))
 975  return -1;
 976 
 978 
 979  s->flags = s->avctx->flags;
 980  s->flags2 = s->avctx->flags2;
 981 
 982  /* set chroma shifts */
 984  &s->chroma_x_shift,
 985  &s->chroma_y_shift);
 986 
 987  /* convert fourcc to upper case */
 989 
 991 
 992  if (s->width && s->height) {
 994 
 995  if (s->encoding) {
 996  if (s->msmpeg4_version) {
 998  2 * 2 * (MAX_LEVEL + 1) *
 999  (MAX_RUN + 1) * 2 * sizeof(int), fail);
 1000  }
 1001  FF_ALLOCZ_OR_GOTO(s->avctx, s->avctx->stats_out, 256, fail);
 1002 
 /* Quantization matrices and the input/reordered picture lists
  * (allocation targets elided in this dump). */
 1004  64 * 32 * sizeof(int), fail);
 1006  64 * 32 * sizeof(int), fail);
 1008  64 * 32 * 2 * sizeof(uint16_t), fail);
 1010  64 * 32 * 2 * sizeof(uint16_t), fail);
 1012  MAX_PICTURE_COUNT * sizeof(Picture *), fail);
 1014  MAX_PICTURE_COUNT * sizeof(Picture *), fail);
 1015 
 1016  if (s->avctx->noise_reduction) {
 1018  2 * 64 * sizeof(uint16_t), fail);
 1019  }
 1020  }
 1021  }
 1022 
 /* Picture pool allocation (target and per-picture init elided). */
 1025  s->picture_count * sizeof(Picture), fail);
 1026  for (i = 0; i < s->picture_count; i++) {
 1028  }
 1029 
 1030  if (s->width && s->height) {
 1031  if (init_context_frame(s))
 1032  goto fail;
 1033 
 1034  s->parse_context.state = -1;
 1035  }
 1036 
 1037  s->context_initialized = 1;
 1038  s->thread_context[0] = s;
 1039 
 /* Split the MB rows evenly across the slice contexts. */
 1040  if (s->width && s->height) {
 1041  if (nb_slices > 1) {
 1042  for (i = 1; i < nb_slices; i++) {
 1043  s->thread_context[i] = av_malloc(sizeof(MpegEncContext));
 1044  memcpy(s->thread_context[i], s, sizeof(MpegEncContext));
 1045  }
 1046 
 1047  for (i = 0; i < nb_slices; i++) {
 1048  if (init_duplicate_context(s->thread_context[i], s) < 0)
 1049  goto fail;
 1050  s->thread_context[i]->start_mb_y =
 1051  (s->mb_height * (i) + nb_slices / 2) / nb_slices;
 1052  s->thread_context[i]->end_mb_y =
 1053  (s->mb_height * (i + 1) + nb_slices / 2) / nb_slices;
 1054  }
 1055  } else {
 1056  if (init_duplicate_context(s, s) < 0)
 1057  goto fail;
 1058  s->start_mb_y = 0;
 1059  s->end_mb_y = s->mb_height;
 1060  }
 1061  s->slice_context_count = nb_slices;
 1062  }
 1063 
 1064  return 0;
 1065  fail:
 1066  ff_MPV_common_end(s);
 1067  return -1;
 1068 }
1069 
/* Free everything init_context_frame() allocated (the frame-size-dependent
 * tables) and reset the dependent public pointers; strides are zeroed so a
 * later re-init starts clean. Returns 0.
 * NOTE(review): signature and several free lines (doxygen 1080-1085, 1089-
 * 1090, 1102, 1106, 1113, 1123) are elided in this dump — among them,
 * judging by the NULLed pointers, the MV table base frees. */
1076 {
 1077  int i, j, k;
 1078 
 1079  av_freep(&s->mb_type);
 1086  s->p_mv_table = NULL;
 1087  s->b_forw_mv_table = NULL;
 1088  s->b_back_mv_table = NULL;
 1091  s->b_direct_mv_table = NULL;
 1092  for (i = 0; i < 2; i++) {
 1093  for (j = 0; j < 2; j++) {
 1094  for (k = 0; k < 2; k++) {
 1095  av_freep(&s->b_field_mv_table_base[i][j][k]);
 1096  s->b_field_mv_table[i][j][k] = NULL;
 1097  }
 1098  av_freep(&s->b_field_select_table[i][j]);
 1099  av_freep(&s->p_field_mv_table_base[i][j]);
 1100  s->p_field_mv_table[i][j] = NULL;
 1101  }
 1103  }
 1104 
 1105  av_freep(&s->dc_val_base);
 1107  av_freep(&s->mbintra_table);
 1108  av_freep(&s->cbp_table);
 1109  av_freep(&s->pred_dir_table);
 1110 
 1111  av_freep(&s->mbskip_table);
 1112 
 1114  av_freep(&s->er.er_temp_buffer);
 1115  av_freep(&s->mb_index2xy);
 1116  av_freep(&s->lambda_table);
 1117  av_freep(&s->cplx_tab);
 1118  av_freep(&s->bits_tab);
 1119 
 1120  s->linesize = s->uvlinesize = 0;
 1121 
 /* Visualization buffers (free call elided in this dump). */
 1122  for (i = 0; i < 3; i++)
 1124 
 1125  return 0;
 1126 }
1127 
/* Rebuild all frame-size-dependent state after a mid-stream resolution
 * change: tear down per-slice duplicate contexts and the frame tables, mark
 * every pooled picture for reallocation, recompute the MB height, then
 * re-run init_context_frame() and re-create the slice contexts (same split
 * logic as in ff_MPV_common_init). Returns 0 or a negative error; on
 * failure the whole context is torn down via ff_MPV_common_end().
 * NOTE(review): signature and a few lines (doxygen 1134, 1140: per-context
 * frees; 1151: current_picture_ptr reset; 1154: field-picture condition)
 * are elided in this dump. */
1129 {
 1130  int i, err = 0;
 1131 
 1132  if (s->slice_context_count > 1) {
 1133  for (i = 0; i < s->slice_context_count; i++) {
 1135  }
 1136  for (i = 1; i < s->slice_context_count; i++) {
 1137  av_freep(&s->thread_context[i]);
 1138  }
 1139  } else
 1141 
 1142  free_context_frame(s);
 1143 
 /* Force every pooled picture to be reallocated at the new size. */
 1144  if (s->picture)
 1145  for (i = 0; i < s->picture_count; i++) {
 1146  s->picture[i].needs_realloc = 1;
 1147  }
 1148 
 1149  s->last_picture_ptr =
 1150  s->next_picture_ptr =
 1152 
 1153  // init
 1155  s->mb_height = (s->height + 31) / 32 * 2;
 1156  else if (s->codec_id != AV_CODEC_ID_H264)
 1157  s->mb_height = (s->height + 15) / 16;
 1158 
 1159  if ((s->width || s->height) &&
 1160  av_image_check_size(s->width, s->height, 0, s->avctx))
 1161  return AVERROR_INVALIDDATA;
 1162 
 1163  if ((err = init_context_frame(s)))
 1164  goto fail;
 1165 
 1166  s->thread_context[0] = s;
 1167 
 1168  if (s->width && s->height) {
 1169  int nb_slices = s->slice_context_count;
 1170  if (nb_slices > 1) {
 1171  for (i = 1; i < nb_slices; i++) {
 1172  s->thread_context[i] = av_malloc(sizeof(MpegEncContext));
 1173  memcpy(s->thread_context[i], s, sizeof(MpegEncContext));
 1174  }
 1175 
 1176  for (i = 0; i < nb_slices; i++) {
 1177  if (init_duplicate_context(s->thread_context[i], s) < 0)
 1178  goto fail;
 1179  s->thread_context[i]->start_mb_y =
 1180  (s->mb_height * (i) + nb_slices / 2) / nb_slices;
 1181  s->thread_context[i]->end_mb_y =
 1182  (s->mb_height * (i + 1) + nb_slices / 2) / nb_slices;
 1183  }
 1184  } else {
 1185  if (init_duplicate_context(s, s) < 0)
 1186  goto fail;
 1187  s->start_mb_y = 0;
 1188  s->end_mb_y = s->mb_height;
 1189  }
 1190  s->slice_context_count = nb_slices;
 1191  }
 1192 
 1193  return 0;
 1194  fail:
 1195  ff_MPV_common_end(s);
 1196  return err;
 1197 }
1198 
1199 /* init common structure for both encoder and decoder */
1201 {
1202  int i;
1203 
1204  if (s->slice_context_count > 1) {
1205  for (i = 0; i < s->slice_context_count; i++) {
1207  }
1208  for (i = 1; i < s->slice_context_count; i++) {
1209  av_freep(&s->thread_context[i]);
1210  }
1211  s->slice_context_count = 1;
1212  } else free_duplicate_context(s);
1213 
1215  s->parse_context.buffer_size = 0;
1216 
1219 
1220  av_freep(&s->avctx->stats_out);
1221  av_freep(&s->ac_stats);
1222 
1223  av_freep(&s->q_intra_matrix);
1224  av_freep(&s->q_inter_matrix);
1227  av_freep(&s->input_picture);
1229  av_freep(&s->dct_offset);
1230 
1231  if (s->picture && !s->avctx->internal->is_copy) {
1232  for (i = 0; i < s->picture_count; i++) {
1233  free_picture(s, &s->picture[i]);
1234  }
1235  }
1236  av_freep(&s->picture);
1237 
1238  free_context_frame(s);
1239 
1242 
1243  s->context_initialized = 0;
1244  s->last_picture_ptr =
1245  s->next_picture_ptr =
1247  s->linesize = s->uvlinesize = 0;
1248 }
1249 
/* Build the run/level helper tables of an RLTable: max_level[] (largest
 * level per run), max_run[] (largest run per level) and index_run[] (first
 * table index per run), separately for the not-last and last coefficient
 * halves of the table. Uses the caller-provided static storage when given,
 * otherwise av_malloc's each table.
 * NOTE(review): the first signature line (doxygen 1250 — presumably
 * "av_cold void ff_init_rl(RLTable *rl,") is elided in this dump. */
 1251  uint8_t static_store[2][2 * MAX_RUN + MAX_LEVEL + 3])
 1252 {
 1253  int8_t max_level[MAX_RUN + 1], max_run[MAX_LEVEL + 1];
 1254  uint8_t index_run[MAX_RUN + 1];
 1255  int last, run, level, start, end, i;
 1256 
 1257  /* If table is static, we can quit if rl->max_level[0] is not NULL */
 1258  if (static_store && rl->max_level[0])
 1259  return;
 1260 
 1261  /* compute max_level[], max_run[] and index_run[] */
 1262  for (last = 0; last < 2; last++) {
 /* First pass covers entries before rl->last, second the rest. */
 1263  if (last == 0) {
 1264  start = 0;
 1265  end = rl->last;
 1266  } else {
 1267  start = rl->last;
 1268  end = rl->n;
 1269  }
 1270 
 1271  memset(max_level, 0, MAX_RUN + 1);
 1272  memset(max_run, 0, MAX_LEVEL + 1);
 1273  memset(index_run, rl->n, MAX_RUN + 1);
 1274  for (i = start; i < end; i++) {
 1275  run = rl->table_run[i];
 1276  level = rl->table_level[i];
 1277  if (index_run[run] == rl->n)
 1278  index_run[run] = i;
 1279  if (level > max_level[run])
 1280  max_level[run] = level;
 1281  if (run > max_run[level])
 1282  max_run[level] = run;
 1283  }
 /* Static storage layout: max_level | max_run | index_run, packed. */
 1284  if (static_store)
 1285  rl->max_level[last] = static_store[last];
 1286  else
 1287  rl->max_level[last] = av_malloc(MAX_RUN + 1);
 1288  memcpy(rl->max_level[last], max_level, MAX_RUN + 1);
 1289  if (static_store)
 1290  rl->max_run[last] = static_store[last] + MAX_RUN + 1;
 1291  else
 1292  rl->max_run[last] = av_malloc(MAX_LEVEL + 1);
 1293  memcpy(rl->max_run[last], max_run, MAX_LEVEL + 1);
 1294  if (static_store)
 1295  rl->index_run[last] = static_store[last] + MAX_RUN + MAX_LEVEL + 2;
 1296  else
 1297  rl->index_run[last] = av_malloc(MAX_RUN + 1);
 1298  memcpy(rl->index_run[last], index_run, MAX_RUN + 1);
 1299  }
 1300 }
1301 
1303 {
1304  int i, q;
1305 
1306  for (q = 0; q < 32; q++) {
1307  int qmul = q * 2;
1308  int qadd = (q - 1) | 1;
1309 
1310  if (q == 0) {
1311  qmul = 1;
1312  qadd = 0;
1313  }
1314  for (i = 0; i < rl->vlc.table_size; i++) {
1315  int code = rl->vlc.table[i][0];
1316  int len = rl->vlc.table[i][1];
1317  int level, run;
1318 
1319  if (len == 0) { // illegal code
1320  run = 66;
1321  level = MAX_LEVEL;
1322  } else if (len < 0) { // more bits needed
1323  run = 0;
1324  level = code;
1325  } else {
1326  if (code == rl->n) { // esc
1327  run = 66;
1328  level = 0;
1329  } else {
1330  run = rl->table_run[code] + 1;
1331  level = rl->table_level[code] * qmul + qadd;
1332  if (code >= rl->last) run += 192;
1333  }
1334  }
1335  rl->rl_vlc[q][i].len = len;
1336  rl->rl_vlc[q][i].level = level;
1337  rl->rl_vlc[q][i].run = run;
1338  }
1339  }
1340 }
1341 
1342 void ff_release_unused_pictures(MpegEncContext*s, int remove_current)
1343 {
1344  int i;
1345 
1346  /* release non reference frames */
1347  for (i = 0; i < s->picture_count; i++) {
1348  if (s->picture[i].f.data[0] && !s->picture[i].f.reference &&
1349  (!s->picture[i].owner2 || s->picture[i].owner2 == s) &&
1350  (remove_current || &s->picture[i] != s->current_picture_ptr)
1351  /* && s->picture[i].type!= FF_BUFFER_TYPE_SHARED */) {
1352  free_frame_buffer(s, &s->picture[i]);
1353  }
1354  }
1355 }
1356 
1357 static inline int pic_is_unused(MpegEncContext *s, Picture *pic)
1358 {
1359  if (pic->f.data[0] == NULL)
1360  return 1;
1361  if (pic->needs_realloc && !(pic->f.reference & DELAYED_PIC_REF))
1362  if (!pic->owner2 || pic->owner2 == s)
1363  return 1;
1364  return 0;
1365 }
1366 
1367 static int find_unused_picture(MpegEncContext *s, int shared)
1368 {
1369  int i;
1370 
1371  if (shared) {
1372  for (i = s->picture_range_start; i < s->picture_range_end; i++) {
1373  if (s->picture[i].f.data[0] == NULL && s->picture[i].f.type == 0)
1374  return i;
1375  }
1376  } else {
1377  for (i = s->picture_range_start; i < s->picture_range_end; i++) {
1378  if (pic_is_unused(s, &s->picture[i]) && s->picture[i].f.type != 0)
1379  return i; // FIXME
1380  }
1381  for (i = s->picture_range_start; i < s->picture_range_end; i++) {
1382  if (pic_is_unused(s, &s->picture[i]))
1383  return i;
1384  }
1385  }
1386 
1387  return AVERROR_INVALIDDATA;
1388 }
1389 
1391 {
1392  int ret = find_unused_picture(s, shared);
1393 
1394  if (ret >= 0 && ret < s->picture_range_end) {
1395  if (s->picture[ret].needs_realloc) {
1396  s->picture[ret].needs_realloc = 0;
1397  free_picture(s, &s->picture[ret]);
1399  }
1400  }
1401  return ret;
1402 }
1403 
1405 {
1406  int intra, i;
1407 
1408  for (intra = 0; intra < 2; intra++) {
1409  if (s->dct_count[intra] > (1 << 16)) {
1410  for (i = 0; i < 64; i++) {
1411  s->dct_error_sum[intra][i] >>= 1;
1412  }
1413  s->dct_count[intra] >>= 1;
1414  }
1415 
1416  for (i = 0; i < 64; i++) {
1417  s->dct_offset[intra][i] = (s->avctx->noise_reduction *
1418  s->dct_count[intra] +
1419  s->dct_error_sum[intra][i] / 2) /
1420  (s->dct_error_sum[intra][i] + 1);
1421  }
1422  }
1423 }
1424 
1430 {
1431  int i;
1432  Picture *pic;
1433  s->mb_skipped = 0;
1434 
1435  /* mark & release old frames */
1436  if (s->out_format != FMT_H264 || s->codec_id == AV_CODEC_ID_SVQ3) {
1437  if (s->pict_type != AV_PICTURE_TYPE_B && s->last_picture_ptr &&
1439  s->last_picture_ptr->f.data[0]) {
1440  if (s->last_picture_ptr->owner2 == s)
1442  }
1443 
1444  /* release forgotten pictures */
1445  /* if (mpeg124/h263) */
1446  if (!s->encoding) {
1447  for (i = 0; i < s->picture_count; i++) {
1448  if (s->picture[i].owner2 == s && s->picture[i].f.data[0] &&
1449  &s->picture[i] != s->last_picture_ptr &&
1450  &s->picture[i] != s->next_picture_ptr &&
1451  s->picture[i].f.reference && !s->picture[i].needs_realloc) {
1452  if (!(avctx->active_thread_type & FF_THREAD_FRAME))
1453  av_log(avctx, AV_LOG_ERROR,
1454  "releasing zombie picture\n");
1455  free_frame_buffer(s, &s->picture[i]);
1456  }
1457  }
1458  }
1459  }
1460 
1461  if (!s->encoding) {
1463 
1464  if (s->current_picture_ptr &&
1465  s->current_picture_ptr->f.data[0] == NULL) {
1466  // we already have a unused image
1467  // (maybe it was set before reading the header)
1468  pic = s->current_picture_ptr;
1469  } else {
1470  i = ff_find_unused_picture(s, 0);
1471  if (i < 0) {
1472  av_log(s->avctx, AV_LOG_ERROR, "no frame buffer available\n");
1473  return i;
1474  }
1475  pic = &s->picture[i];
1476  }
1477 
1478  pic->f.reference = 0;
1479  if (!s->droppable) {
1480  if (s->codec_id == AV_CODEC_ID_H264)
1481  pic->f.reference = s->picture_structure;
1482  else if (s->pict_type != AV_PICTURE_TYPE_B)
1483  pic->f.reference = 3;
1484  }
1485 
1487 
1488  if (ff_alloc_picture(s, pic, 0) < 0)
1489  return -1;
1490 
1491  s->current_picture_ptr = pic;
1492  // FIXME use only the vars from current_pic
1494  if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO ||
1496  if (s->picture_structure != PICT_FRAME)
1499  }
1503  }
1504 
1506  // if (s->flags && CODEC_FLAG_QSCALE)
1507  // s->current_picture_ptr->quality = s->new_picture_ptr->quality;
1509 
1511 
1512  if (s->pict_type != AV_PICTURE_TYPE_B) {
1514  if (!s->droppable)
1516  }
1517  av_dlog(s->avctx, "L%p N%p C%p L%p N%p C%p type:%d drop:%d\n",
1522  s->pict_type, s->droppable);
1523 
1524  if (s->codec_id != AV_CODEC_ID_H264) {
1525  if ((s->last_picture_ptr == NULL ||
1526  s->last_picture_ptr->f.data[0] == NULL) &&
1527  (s->pict_type != AV_PICTURE_TYPE_I ||
1528  s->picture_structure != PICT_FRAME)) {
1529  if (s->pict_type != AV_PICTURE_TYPE_I)
1530  av_log(avctx, AV_LOG_ERROR,
1531  "warning: first frame is no keyframe\n");
1532  else if (s->picture_structure != PICT_FRAME)
1533  av_log(avctx, AV_LOG_INFO,
1534  "allocate dummy last picture for field based first keyframe\n");
1535 
1536  /* Allocate a dummy frame */
1537  i = ff_find_unused_picture(s, 0);
1538  if (i < 0) {
1539  av_log(s->avctx, AV_LOG_ERROR, "no frame buffer available\n");
1540  return i;
1541  }
1542  s->last_picture_ptr = &s->picture[i];
1543 
1544  s->last_picture_ptr->f.reference = 3;
1546 
1547  if (ff_alloc_picture(s, s->last_picture_ptr, 0) < 0) {
1548  s->last_picture_ptr = NULL;
1549  return -1;
1550  }
1551  ff_thread_report_progress(&s->last_picture_ptr->f, INT_MAX, 0);
1552  ff_thread_report_progress(&s->last_picture_ptr->f, INT_MAX, 1);
1553  s->last_picture_ptr->f.reference = 3;
1554  }
1555  if ((s->next_picture_ptr == NULL ||
1556  s->next_picture_ptr->f.data[0] == NULL) &&
1557  s->pict_type == AV_PICTURE_TYPE_B) {
1558  /* Allocate a dummy frame */
1559  i = ff_find_unused_picture(s, 0);
1560  if (i < 0) {
1561  av_log(s->avctx, AV_LOG_ERROR, "no frame buffer available\n");
1562  return i;
1563  }
1564  s->next_picture_ptr = &s->picture[i];
1565 
1566  s->next_picture_ptr->f.reference = 3;
1568 
1569  if (ff_alloc_picture(s, s->next_picture_ptr, 0) < 0) {
1570  s->next_picture_ptr = NULL;
1571  return -1;
1572  }
1573  ff_thread_report_progress(&s->next_picture_ptr->f, INT_MAX, 0);
1574  ff_thread_report_progress(&s->next_picture_ptr->f, INT_MAX, 1);
1575  s->next_picture_ptr->f.reference = 3;
1576  }
1577  }
1578 
1579  if (s->last_picture_ptr)
1581  if (s->next_picture_ptr)
1583 
1584  if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_FRAME)) {
1585  if (s->next_picture_ptr)
1586  s->next_picture_ptr->owner2 = s;
1587  if (s->last_picture_ptr)
1588  s->last_picture_ptr->owner2 = s;
1589  }
1590 
1591  if (s->pict_type != AV_PICTURE_TYPE_I &&
1592  !(s->last_picture_ptr && s->last_picture_ptr->f.data[0])) {
1593  av_log(s, AV_LOG_ERROR,
1594  "Non-reference picture received and no reference available\n");
1595  return AVERROR_INVALIDDATA;
1596  }
1597 
1598  if (s->picture_structure!= PICT_FRAME && s->out_format != FMT_H264) {
1599  int i;
1600  for (i = 0; i < 4; i++) {
1602  s->current_picture.f.data[i] +=
1603  s->current_picture.f.linesize[i];
1604  }
1605  s->current_picture.f.linesize[i] *= 2;
1606  s->last_picture.f.linesize[i] *= 2;
1607  s->next_picture.f.linesize[i] *= 2;
1608  }
1609  }
1610 
1611  s->err_recognition = avctx->err_recognition;
1612 
1613  /* set dequantizer, we can't do it during init as
1614  * it might change for mpeg4 and we can't do it in the header
1615  * decode as init is not called for mpeg4 there yet */
1616  if (s->mpeg_quant || s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
1619  } else if (s->out_format == FMT_H263 || s->out_format == FMT_H261) {
1622  } else {
1625  }
1626 
1627  if (s->dct_error_sum) {
1628  assert(s->avctx->noise_reduction && s->encoding);
1630  }
1631 
1633  return ff_xvmc_field_start(s, avctx);
1634 
1635  return 0;
1636 }
1637 
1638 /* generic function for encode/decode called after a
1639  * frame has been coded/decoded. */
1641 {
1642  int i;
1643  /* redraw edges for the frame if decoding didn't complete */
1644  // just to make sure that all data is rendered.
1646  ff_xvmc_field_end(s);
1647  } else if ((s->er.error_count || s->encoding) &&
1648  !s->avctx->hwaccel &&
1650  s->unrestricted_mv &&
1652  !s->intra_only &&
1653  !(s->flags & CODEC_FLAG_EMU_EDGE)) {
1655  int hshift = desc->log2_chroma_w;
1656  int vshift = desc->log2_chroma_h;
1658  s->h_edge_pos, s->v_edge_pos,
1660  EDGE_TOP | EDGE_BOTTOM);
1662  s->h_edge_pos >> hshift, s->v_edge_pos >> vshift,
1663  EDGE_WIDTH >> hshift, EDGE_WIDTH >> vshift,
1664  EDGE_TOP | EDGE_BOTTOM);
1666  s->h_edge_pos >> hshift, s->v_edge_pos >> vshift,
1667  EDGE_WIDTH >> hshift, EDGE_WIDTH >> vshift,
1668  EDGE_TOP | EDGE_BOTTOM);
1669  }
1670 
1671  emms_c();
1672 
1673  s->last_pict_type = s->pict_type;
1675  if (s->pict_type!= AV_PICTURE_TYPE_B) {
1677  }
1678 #if 0
1679  /* copy back current_picture variables */
1680  for (i = 0; i < MAX_PICTURE_COUNT; i++) {
1681  if (s->picture[i].f.data[0] == s->current_picture.f.data[0]) {
1682  s->picture[i] = s->current_picture;
1683  break;
1684  }
1685  }
1686  assert(i < MAX_PICTURE_COUNT);
1687 #endif
1688 
1689  if (s->encoding) {
1690  /* release non-reference frames */
1691  for (i = 0; i < s->picture_count; i++) {
1692  if (s->picture[i].f.data[0] && !s->picture[i].f.reference
1693  /* && s->picture[i].type != FF_BUFFER_TYPE_SHARED */) {
1694  free_frame_buffer(s, &s->picture[i]);
1695  }
1696  }
1697  }
1698  // clear copies, to avoid confusion
1699 #if 0
1700  memset(&s->last_picture, 0, sizeof(Picture));
1701  memset(&s->next_picture, 0, sizeof(Picture));
1702  memset(&s->current_picture, 0, sizeof(Picture));
1703 #endif
1705 
1708  }
1709 }
1710 
1718 static void draw_line(uint8_t *buf, int sx, int sy, int ex, int ey,
1719  int w, int h, int stride, int color)
1720 {
1721  int x, y, fr, f;
1722 
1723  sx = av_clip(sx, 0, w - 1);
1724  sy = av_clip(sy, 0, h - 1);
1725  ex = av_clip(ex, 0, w - 1);
1726  ey = av_clip(ey, 0, h - 1);
1727 
1728  buf[sy * stride + sx] += color;
1729 
1730  if (FFABS(ex - sx) > FFABS(ey - sy)) {
1731  if (sx > ex) {
1732  FFSWAP(int, sx, ex);
1733  FFSWAP(int, sy, ey);
1734  }
1735  buf += sx + sy * stride;
1736  ex -= sx;
1737  f = ((ey - sy) << 16) / ex;
1738  for (x = 0; x <= ex; x++) {
1739  y = (x * f) >> 16;
1740  fr = (x * f) & 0xFFFF;
1741  buf[y * stride + x] += (color * (0x10000 - fr)) >> 16;
1742  buf[(y + 1) * stride + x] += (color * fr ) >> 16;
1743  }
1744  } else {
1745  if (sy > ey) {
1746  FFSWAP(int, sx, ex);
1747  FFSWAP(int, sy, ey);
1748  }
1749  buf += sx + sy * stride;
1750  ey -= sy;
1751  if (ey)
1752  f = ((ex - sx) << 16) / ey;
1753  else
1754  f = 0;
1755  for (y = 0; y = ey; y++) {
1756  x = (y * f) >> 16;
1757  fr = (y * f) & 0xFFFF;
1758  buf[y * stride + x] += (color * (0x10000 - fr)) >> 16;
1759  buf[y * stride + x + 1] += (color * fr ) >> 16;
1760  }
1761  }
1762 }
1763 
/**
 * Draw an arrow from (ex, ey) -> (sx, sy): the shaft plus, when the arrow
 * is long enough, two short head strokes at the (sx, sy) end.
 */
static void draw_arrow(uint8_t *buf, int sx, int sy, int ex,
                       int ey, int w, int h, int stride, int color)
{
    int dx, dy;

    /* Keep the endpoints within a loose 100-pixel margin around the image;
     * draw_line() clips them exactly afterwards. */
    sx = av_clip(sx, -100, w + 100);
    sy = av_clip(sy, -100, h + 100);
    ex = av_clip(ex, -100, w + 100);
    ey = av_clip(ey, -100, h + 100);

    dx = ex - sx;
    dy = ey - sy;

    /* Only add the head strokes when the squared length exceeds 3*3. */
    if (dx * dx + dy * dy > 3 * 3) {
        int rx     = dx + dy;           /* shaft direction rotated by 45 deg */
        int ry     = dy - dx;
        int length = ff_sqrt((rx * rx + ry * ry) << 8);

        // FIXME subpixel accuracy
        rx = ROUNDED_DIV(rx * 3 << 4, length);
        ry = ROUNDED_DIV(ry * 3 << 4, length);

        draw_line(buf, sx, sy, sx + rx, sy + ry, w, h, stride, color);
        draw_line(buf, sx, sy, sx - ry, sy + rx, w, h, stride, color);
    }
    draw_line(buf, sx, sy, ex, ey, w, h, stride, color);
}
1798 
1803 {
1804  if (s->avctx->hwaccel || !pict || !pict->mb_type)
1805  return;
1806 
1808  int x,y;
1809 
1810  av_log(s->avctx,AV_LOG_DEBUG,"New frame, type: ");
1811  switch (pict->pict_type) {
1812  case AV_PICTURE_TYPE_I:
1813  av_log(s->avctx,AV_LOG_DEBUG,"I\n");
1814  break;
1815  case AV_PICTURE_TYPE_P:
1816  av_log(s->avctx,AV_LOG_DEBUG,"P\n");
1817  break;
1818  case AV_PICTURE_TYPE_B:
1819  av_log(s->avctx,AV_LOG_DEBUG,"B\n");
1820  break;
1821  case AV_PICTURE_TYPE_S:
1822  av_log(s->avctx,AV_LOG_DEBUG,"S\n");
1823  break;
1824  case AV_PICTURE_TYPE_SI:
1825  av_log(s->avctx,AV_LOG_DEBUG,"SI\n");
1826  break;
1827  case AV_PICTURE_TYPE_SP:
1828  av_log(s->avctx,AV_LOG_DEBUG,"SP\n");
1829  break;
1830  }
1831  for (y = 0; y < s->mb_height; y++) {
1832  for (x = 0; x < s->mb_width; x++) {
1833  if (s->avctx->debug & FF_DEBUG_SKIP) {
1834  int count = s->mbskip_table[x + y * s->mb_stride];
1835  if (count > 9)
1836  count = 9;
1837  av_log(s->avctx, AV_LOG_DEBUG, "%1d", count);
1838  }
1839  if (s->avctx->debug & FF_DEBUG_QP) {
1840  av_log(s->avctx, AV_LOG_DEBUG, "%2d",
1841  pict->qscale_table[x + y * s->mb_stride]);
1842  }
1843  if (s->avctx->debug & FF_DEBUG_MB_TYPE) {
1844  int mb_type = pict->mb_type[x + y * s->mb_stride];
1845  // Type & MV direction
1846  if (IS_PCM(mb_type))
1847  av_log(s->avctx, AV_LOG_DEBUG, "P");
1848  else if (IS_INTRA(mb_type) && IS_ACPRED(mb_type))
1849  av_log(s->avctx, AV_LOG_DEBUG, "A");
1850  else if (IS_INTRA4x4(mb_type))
1851  av_log(s->avctx, AV_LOG_DEBUG, "i");
1852  else if (IS_INTRA16x16(mb_type))
1853  av_log(s->avctx, AV_LOG_DEBUG, "I");
1854  else if (IS_DIRECT(mb_type) && IS_SKIP(mb_type))
1855  av_log(s->avctx, AV_LOG_DEBUG, "d");
1856  else if (IS_DIRECT(mb_type))
1857  av_log(s->avctx, AV_LOG_DEBUG, "D");
1858  else if (IS_GMC(mb_type) && IS_SKIP(mb_type))
1859  av_log(s->avctx, AV_LOG_DEBUG, "g");
1860  else if (IS_GMC(mb_type))
1861  av_log(s->avctx, AV_LOG_DEBUG, "G");
1862  else if (IS_SKIP(mb_type))
1863  av_log(s->avctx, AV_LOG_DEBUG, "S");
1864  else if (!USES_LIST(mb_type, 1))
1865  av_log(s->avctx, AV_LOG_DEBUG, ">");
1866  else if (!USES_LIST(mb_type, 0))
1867  av_log(s->avctx, AV_LOG_DEBUG, "<");
1868  else {
1869  assert(USES_LIST(mb_type, 0) && USES_LIST(mb_type, 1));
1870  av_log(s->avctx, AV_LOG_DEBUG, "X");
1871  }
1872 
1873  // segmentation
1874  if (IS_8X8(mb_type))
1875  av_log(s->avctx, AV_LOG_DEBUG, "+");
1876  else if (IS_16X8(mb_type))
1877  av_log(s->avctx, AV_LOG_DEBUG, "-");
1878  else if (IS_8X16(mb_type))
1879  av_log(s->avctx, AV_LOG_DEBUG, "|");
1880  else if (IS_INTRA(mb_type) || IS_16X16(mb_type))
1881  av_log(s->avctx, AV_LOG_DEBUG, " ");
1882  else
1883  av_log(s->avctx, AV_LOG_DEBUG, "?");
1884 
1885 
1886  if (IS_INTERLACED(mb_type))
1887  av_log(s->avctx, AV_LOG_DEBUG, "=");
1888  else
1889  av_log(s->avctx, AV_LOG_DEBUG, " ");
1890  }
1891  }
1892  av_log(s->avctx, AV_LOG_DEBUG, "\n");
1893  }
1894  }
1895 
1896  if ((s->avctx->debug & (FF_DEBUG_VIS_QP | FF_DEBUG_VIS_MB_TYPE)) ||
1897  (s->avctx->debug_mv)) {
1898  const int shift = 1 + s->quarter_sample;
1899  int mb_y;
1900  uint8_t *ptr;
1901  int i;
1902  int h_chroma_shift, v_chroma_shift, block_height;
1903  const int width = s->avctx->width;
1904  const int height = s->avctx->height;
1905  const int mv_sample_log2 = 4 - pict->motion_subsample_log2;
1906  const int mv_stride = (s->mb_width << mv_sample_log2) +
1907  (s->codec_id == AV_CODEC_ID_H264 ? 0 : 1);
1908  s->low_delay = 0; // needed to see the vectors without trashing the buffers
1909 
1911  &h_chroma_shift, &v_chroma_shift);
1912  for (i = 0; i < 3; i++) {
1913  memcpy(s->visualization_buffer[i], pict->data[i],
1914  (i == 0) ? pict->linesize[i] * height:
1915  pict->linesize[i] * height >> v_chroma_shift);
1916  pict->data[i] = s->visualization_buffer[i];
1917  }
1918  pict->type = FF_BUFFER_TYPE_COPY;
1919  ptr = pict->data[0];
1920  block_height = 16 >> v_chroma_shift;
1921 
1922  for (mb_y = 0; mb_y < s->mb_height; mb_y++) {
1923  int mb_x;
1924  for (mb_x = 0; mb_x < s->mb_width; mb_x++) {
1925  const int mb_index = mb_x + mb_y * s->mb_stride;
1926  if ((s->avctx->debug_mv) && pict->motion_val) {
1927  int type;
1928  for (type = 0; type < 3; type++) {
1929  int direction = 0;
1930  switch (type) {
1931  case 0:
1932  if ((!(s->avctx->debug_mv & FF_DEBUG_VIS_MV_P_FOR)) ||
1933  (pict->pict_type!= AV_PICTURE_TYPE_P))
1934  continue;
1935  direction = 0;
1936  break;
1937  case 1:
1938  if ((!(s->avctx->debug_mv & FF_DEBUG_VIS_MV_B_FOR)) ||
1939  (pict->pict_type!= AV_PICTURE_TYPE_B))
1940  continue;
1941  direction = 0;
1942  break;
1943  case 2:
1944  if ((!(s->avctx->debug_mv & FF_DEBUG_VIS_MV_B_BACK)) ||
1945  (pict->pict_type!= AV_PICTURE_TYPE_B))
1946  continue;
1947  direction = 1;
1948  break;
1949  }
1950  if (!USES_LIST(pict->mb_type[mb_index], direction))
1951  continue;
1952 
1953  if (IS_8X8(pict->mb_type[mb_index])) {
1954  int i;
1955  for (i = 0; i < 4; i++) {
1956  int sx = mb_x * 16 + 4 + 8 * (i & 1);
1957  int sy = mb_y * 16 + 4 + 8 * (i >> 1);
1958  int xy = (mb_x * 2 + (i & 1) +
1959  (mb_y * 2 + (i >> 1)) * mv_stride) << (mv_sample_log2 - 1);
1960  int mx = (pict->motion_val[direction][xy][0] >> shift) + sx;
1961  int my = (pict->motion_val[direction][xy][1] >> shift) + sy;
1962  draw_arrow(ptr, sx, sy, mx, my, width,
1963  height, s->linesize, 100);
1964  }
1965  } else if (IS_16X8(pict->mb_type[mb_index])) {
1966  int i;
1967  for (i = 0; i < 2; i++) {
1968  int sx = mb_x * 16 + 8;
1969  int sy = mb_y * 16 + 4 + 8 * i;
1970  int xy = (mb_x * 2 + (mb_y * 2 + i) * mv_stride) << (mv_sample_log2 - 1);
1971  int mx = (pict->motion_val[direction][xy][0] >> shift);
1972  int my = (pict->motion_val[direction][xy][1] >> shift);
1973 
1974  if (IS_INTERLACED(pict->mb_type[mb_index]))
1975  my *= 2;
1976 
1977  draw_arrow(ptr, sx, sy, mx + sx, my + sy, width,
1978  height, s->linesize, 100);
1979  }
1980  } else if (IS_8X16(pict->mb_type[mb_index])) {
1981  int i;
1982  for (i = 0; i < 2; i++) {
1983  int sx = mb_x * 16 + 4 + 8 * i;
1984  int sy = mb_y * 16 + 8;
1985  int xy = (mb_x * 2 + i + mb_y * 2 * mv_stride) << (mv_sample_log2 - 1);
1986  int mx = pict->motion_val[direction][xy][0] >> shift;
1987  int my = pict->motion_val[direction][xy][1] >> shift;
1988 
1989  if (IS_INTERLACED(pict->mb_type[mb_index]))
1990  my *= 2;
1991 
1992  draw_arrow(ptr, sx, sy, mx + sx, my + sy, width,
1993  height, s->linesize, 100);
1994  }
1995  } else {
1996  int sx = mb_x * 16 + 8;
1997  int sy = mb_y * 16 + 8;
1998  int xy = (mb_x + mb_y * mv_stride) << mv_sample_log2;
1999  int mx = pict->motion_val[direction][xy][0] >> shift + sx;
2000  int my = pict->motion_val[direction][xy][1] >> shift + sy;
2001  draw_arrow(ptr, sx, sy, mx, my, width, height, s->linesize, 100);
2002  }
2003  }
2004  }
2005  if ((s->avctx->debug & FF_DEBUG_VIS_QP) && pict->motion_val) {
2006  uint64_t c = (pict->qscale_table[mb_index] * 128 / 31) *
2007  0x0101010101010101ULL;
2008  int y;
2009  for (y = 0; y < block_height; y++) {
2010  *(uint64_t *)(pict->data[1] + 8 * mb_x +
2011  (block_height * mb_y + y) *
2012  pict->linesize[1]) = c;
2013  *(uint64_t *)(pict->data[2] + 8 * mb_x +
2014  (block_height * mb_y + y) *
2015  pict->linesize[2]) = c;
2016  }
2017  }
2018  if ((s->avctx->debug & FF_DEBUG_VIS_MB_TYPE) &&
2019  pict->motion_val) {
2020  int mb_type = pict->mb_type[mb_index];
2021  uint64_t u,v;
2022  int y;
2023 #define COLOR(theta, r) \
2024  u = (int)(128 + r * cos(theta * 3.141592 / 180)); \
2025  v = (int)(128 + r * sin(theta * 3.141592 / 180));
2026 
2027 
2028  u = v = 128;
2029  if (IS_PCM(mb_type)) {
2030  COLOR(120, 48)
2031  } else if ((IS_INTRA(mb_type) && IS_ACPRED(mb_type)) ||
2032  IS_INTRA16x16(mb_type)) {
2033  COLOR(30, 48)
2034  } else if (IS_INTRA4x4(mb_type)) {
2035  COLOR(90, 48)
2036  } else if (IS_DIRECT(mb_type) && IS_SKIP(mb_type)) {
2037  // COLOR(120, 48)
2038  } else if (IS_DIRECT(mb_type)) {
2039  COLOR(150, 48)
2040  } else if (IS_GMC(mb_type) && IS_SKIP(mb_type)) {
2041  COLOR(170, 48)
2042  } else if (IS_GMC(mb_type)) {
2043  COLOR(190, 48)
2044  } else if (IS_SKIP(mb_type)) {
2045  // COLOR(180, 48)
2046  } else if (!USES_LIST(mb_type, 1)) {
2047  COLOR(240, 48)
2048  } else if (!USES_LIST(mb_type, 0)) {
2049  COLOR(0, 48)
2050  } else {
2051  assert(USES_LIST(mb_type, 0) && USES_LIST(mb_type, 1));
2052  COLOR(300,48)
2053  }
2054 
2055  u *= 0x0101010101010101ULL;
2056  v *= 0x0101010101010101ULL;
2057  for (y = 0; y < block_height; y++) {
2058  *(uint64_t *)(pict->data[1] + 8 * mb_x +
2059  (block_height * mb_y + y) * pict->linesize[1]) = u;
2060  *(uint64_t *)(pict->data[2] + 8 * mb_x +
2061  (block_height * mb_y + y) * pict->linesize[2]) = v;
2062  }
2063 
2064  // segmentation
2065  if (IS_8X8(mb_type) || IS_16X8(mb_type)) {
2066  *(uint64_t *)(pict->data[0] + 16 * mb_x + 0 +
2067  (16 * mb_y + 8) * pict->linesize[0]) ^= 0x8080808080808080ULL;
2068  *(uint64_t *)(pict->data[0] + 16 * mb_x + 8 +
2069  (16 * mb_y + 8) * pict->linesize[0]) ^= 0x8080808080808080ULL;
2070  }
2071  if (IS_8X8(mb_type) || IS_8X16(mb_type)) {
2072  for (y = 0; y < 16; y++)
2073  pict->data[0][16 * mb_x + 8 + (16 * mb_y + y) *
2074  pict->linesize[0]] ^= 0x80;
2075  }
2076  if (IS_8X8(mb_type) && mv_sample_log2 >= 2) {
2077  int dm = 1 << (mv_sample_log2 - 2);
2078  for (i = 0; i < 4; i++) {
2079  int sx = mb_x * 16 + 8 * (i & 1);
2080  int sy = mb_y * 16 + 8 * (i >> 1);
2081  int xy = (mb_x * 2 + (i & 1) +
2082  (mb_y * 2 + (i >> 1)) * mv_stride) << (mv_sample_log2 - 1);
2083  // FIXME bidir
2084  int32_t *mv = (int32_t *) &pict->motion_val[0][xy];
2085  if (mv[0] != mv[dm] ||
2086  mv[dm * mv_stride] != mv[dm * (mv_stride + 1)])
2087  for (y = 0; y < 8; y++)
2088  pict->data[0][sx + 4 + (sy + y) * pict->linesize[0]] ^= 0x80;
2089  if (mv[0] != mv[dm * mv_stride] || mv[dm] != mv[dm * (mv_stride + 1)])
2090  *(uint64_t *)(pict->data[0] + sx + (sy + 4) *
2091  pict->linesize[0]) ^= 0x8080808080808080ULL;
2092  }
2093  }
2094 
2095  if (IS_INTERLACED(mb_type) &&
2096  s->codec_id == AV_CODEC_ID_H264) {
2097  // hmm
2098  }
2099  }
2100  s->mbskip_table[mb_index] = 0;
2101  }
2102  }
2103  }
2104 }
2105 
2110 {
2111  int my_max = INT_MIN, my_min = INT_MAX, qpel_shift = !s->quarter_sample;
2112  int my, off, i, mvs;
2113 
2114  if (s->picture_structure != PICT_FRAME || s->mcsel)
2115  goto unhandled;
2116 
2117  switch (s->mv_type) {
2118  case MV_TYPE_16X16:
2119  mvs = 1;
2120  break;
2121  case MV_TYPE_16X8:
2122  mvs = 2;
2123  break;
2124  case MV_TYPE_8X8:
2125  mvs = 4;
2126  break;
2127  default:
2128  goto unhandled;
2129  }
2130 
2131  for (i = 0; i < mvs; i++) {
2132  my = s->mv[dir][i][1]<<qpel_shift;
2133  my_max = FFMAX(my_max, my);
2134  my_min = FFMIN(my_min, my);
2135  }
2136 
2137  off = (FFMAX(-my_min, my_max) + 63) >> 6;
2138 
2139  return FFMIN(FFMAX(s->mb_y + off, 0), s->mb_height-1);
2140 unhandled:
2141  return s->mb_height-1;
2142 }
2143 
2144 /* put block[] to dest[] */
2145 static inline void put_dct(MpegEncContext *s,
2146  DCTELEM *block, int i, uint8_t *dest, int line_size, int qscale)
2147 {
2148  s->dct_unquantize_intra(s, block, i, qscale);
2149  s->dsp.idct_put (dest, line_size, block);
2150 }
2151 
2152 /* add block[] to dest[] */
2153 static inline void add_dct(MpegEncContext *s,
2154  DCTELEM *block, int i, uint8_t *dest, int line_size)
2155 {
2156  if (s->block_last_index[i] >= 0) {
2157  s->dsp.idct_add (dest, line_size, block);
2158  }
2159 }
2160 
2161 static inline void add_dequant_dct(MpegEncContext *s,
2162  DCTELEM *block, int i, uint8_t *dest, int line_size, int qscale)
2163 {
2164  if (s->block_last_index[i] >= 0) {
2165  s->dct_unquantize_inter(s, block, i, qscale);
2166 
2167  s->dsp.idct_add (dest, line_size, block);
2168  }
2169 }
2170 
2175 {
2176  int wrap = s->b8_stride;
2177  int xy = s->block_index[0];
2178 
2179  s->dc_val[0][xy ] =
2180  s->dc_val[0][xy + 1 ] =
2181  s->dc_val[0][xy + wrap] =
2182  s->dc_val[0][xy + 1 + wrap] = 1024;
2183  /* ac pred */
2184  memset(s->ac_val[0][xy ], 0, 32 * sizeof(int16_t));
2185  memset(s->ac_val[0][xy + wrap], 0, 32 * sizeof(int16_t));
2186  if (s->msmpeg4_version>=3) {
2187  s->coded_block[xy ] =
2188  s->coded_block[xy + 1 ] =
2189  s->coded_block[xy + wrap] =
2190  s->coded_block[xy + 1 + wrap] = 0;
2191  }
2192  /* chroma */
2193  wrap = s->mb_stride;
2194  xy = s->mb_x + s->mb_y * wrap;
2195  s->dc_val[1][xy] =
2196  s->dc_val[2][xy] = 1024;
2197  /* ac pred */
2198  memset(s->ac_val[1][xy], 0, 16 * sizeof(int16_t));
2199  memset(s->ac_val[2][xy], 0, 16 * sizeof(int16_t));
2200 
2201  s->mbintra_table[xy]= 0;
2202 }
2203 
2204 /* generic function called after a macroblock has been parsed by the
2205  decoder or after it has been encoded by the encoder.
2206 
2207  Important variables used:
2208  s->mb_intra : true if intra macroblock
2209  s->mv_dir : motion vector direction
2210  s->mv_type : motion vector type
2211  s->mv : motion vector
2212  s->interlaced_dct : true if interlaced dct used (mpeg2)
2213  */
2214 static av_always_inline
2216  int is_mpeg12)
2217 {
2218  const int mb_xy = s->mb_y * s->mb_stride + s->mb_x;
2220  ff_xvmc_decode_mb(s);//xvmc uses pblocks
2221  return;
2222  }
2223 
2224  if(s->avctx->debug&FF_DEBUG_DCT_COEFF) {
2225  /* save DCT coefficients */
2226  int i,j;
2227  DCTELEM *dct = &s->current_picture.f.dct_coeff[mb_xy * 64 * 6];
2228  av_log(s->avctx, AV_LOG_DEBUG, "DCT coeffs of MB at %dx%d:\n", s->mb_x, s->mb_y);
2229  for(i=0; i<6; i++){
2230  for(j=0; j<64; j++){
2231  *dct++ = block[i][s->dsp.idct_permutation[j]];
2232  av_log(s->avctx, AV_LOG_DEBUG, "%5d", dct[-1]);
2233  }
2234  av_log(s->avctx, AV_LOG_DEBUG, "\n");
2235  }
2236  }
2237 
2238  s->current_picture.f.qscale_table[mb_xy] = s->qscale;
2239 
2240  /* update DC predictors for P macroblocks */
2241  if (!s->mb_intra) {
2242  if (!is_mpeg12 && (s->h263_pred || s->h263_aic)) {
2243  if(s->mbintra_table[mb_xy])
2245  } else {
2246  s->last_dc[0] =
2247  s->last_dc[1] =
2248  s->last_dc[2] = 128 << s->intra_dc_precision;
2249  }
2250  }
2251  else if (!is_mpeg12 && (s->h263_pred || s->h263_aic))
2252  s->mbintra_table[mb_xy]=1;
2253 
2254  if ((s->flags&CODEC_FLAG_PSNR) || !(s->encoding && (s->intra_only || s->pict_type==AV_PICTURE_TYPE_B) && s->avctx->mb_decision != FF_MB_DECISION_RD)) { //FIXME precalc
2255  uint8_t *dest_y, *dest_cb, *dest_cr;
2256  int dct_linesize, dct_offset;
2257  op_pixels_func (*op_pix)[4];
2258  qpel_mc_func (*op_qpix)[16];
2259  const int linesize = s->current_picture.f.linesize[0]; //not s->linesize as this would be wrong for field pics
2260  const int uvlinesize = s->current_picture.f.linesize[1];
2261  const int readable= s->pict_type != AV_PICTURE_TYPE_B || s->encoding || s->avctx->draw_horiz_band;
2262  const int block_size = 8;
2263 
2264  /* avoid copy if macroblock skipped in last frame too */
2265  /* skip only during decoding as we might trash the buffers during encoding a bit */
2266  if(!s->encoding){
2267  uint8_t *mbskip_ptr = &s->mbskip_table[mb_xy];
2268 
2269  if (s->mb_skipped) {
2270  s->mb_skipped= 0;
2271  assert(s->pict_type!=AV_PICTURE_TYPE_I);
2272  *mbskip_ptr = 1;
2273  } else if(!s->current_picture.f.reference) {
2274  *mbskip_ptr = 1;
2275  } else{
2276  *mbskip_ptr = 0; /* not skipped */
2277  }
2278  }
2279 
2280  dct_linesize = linesize << s->interlaced_dct;
2281  dct_offset = s->interlaced_dct ? linesize : linesize * block_size;
2282 
2283  if(readable){
2284  dest_y= s->dest[0];
2285  dest_cb= s->dest[1];
2286  dest_cr= s->dest[2];
2287  }else{
2288  dest_y = s->b_scratchpad;
2289  dest_cb= s->b_scratchpad+16*linesize;
2290  dest_cr= s->b_scratchpad+32*linesize;
2291  }
2292 
2293  if (!s->mb_intra) {
2294  /* motion handling */
2295  /* decoding or more than one mb_type (MC was already done otherwise) */
2296  if(!s->encoding){
2297 
2299  if (s->mv_dir & MV_DIR_FORWARD) {
2302  0);
2303  }
2304  if (s->mv_dir & MV_DIR_BACKWARD) {
2307  0);
2308  }
2309  }
2310 
2311  op_qpix= s->me.qpel_put;
2312  if ((!s->no_rounding) || s->pict_type==AV_PICTURE_TYPE_B){
2313  op_pix = s->dsp.put_pixels_tab;
2314  }else{
2315  op_pix = s->dsp.put_no_rnd_pixels_tab;
2316  }
2317  if (s->mv_dir & MV_DIR_FORWARD) {
2318  ff_MPV_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_picture.f.data, op_pix, op_qpix);
2319  op_pix = s->dsp.avg_pixels_tab;
2320  op_qpix= s->me.qpel_avg;
2321  }
2322  if (s->mv_dir & MV_DIR_BACKWARD) {
2323  ff_MPV_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_picture.f.data, op_pix, op_qpix);
2324  }
2325  }
2326 
2327  /* skip dequant / idct if we are really late ;) */
2328  if(s->avctx->skip_idct){
2331  || s->avctx->skip_idct >= AVDISCARD_ALL)
2332  goto skip_idct;
2333  }
2334 
2335  /* add dct residue */
2337  || (s->codec_id==AV_CODEC_ID_MPEG4 && !s->mpeg_quant))){
2338  add_dequant_dct(s, block[0], 0, dest_y , dct_linesize, s->qscale);
2339  add_dequant_dct(s, block[1], 1, dest_y + block_size, dct_linesize, s->qscale);
2340  add_dequant_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize, s->qscale);
2341  add_dequant_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize, s->qscale);
2342 
2343  if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
2344  if (s->chroma_y_shift){
2345  add_dequant_dct(s, block[4], 4, dest_cb, uvlinesize, s->chroma_qscale);
2346  add_dequant_dct(s, block[5], 5, dest_cr, uvlinesize, s->chroma_qscale);
2347  }else{
2348  dct_linesize >>= 1;
2349  dct_offset >>=1;
2350  add_dequant_dct(s, block[4], 4, dest_cb, dct_linesize, s->chroma_qscale);
2351  add_dequant_dct(s, block[5], 5, dest_cr, dct_linesize, s->chroma_qscale);
2352  add_dequant_dct(s, block[6], 6, dest_cb + dct_offset, dct_linesize, s->chroma_qscale);
2353  add_dequant_dct(s, block[7], 7, dest_cr + dct_offset, dct_linesize, s->chroma_qscale);
2354  }
2355  }
2356  } else if(is_mpeg12 || (s->codec_id != AV_CODEC_ID_WMV2)){
2357  add_dct(s, block[0], 0, dest_y , dct_linesize);
2358  add_dct(s, block[1], 1, dest_y + block_size, dct_linesize);
2359  add_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize);
2360  add_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize);
2361 
2362  if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
2363  if(s->chroma_y_shift){//Chroma420
2364  add_dct(s, block[4], 4, dest_cb, uvlinesize);
2365  add_dct(s, block[5], 5, dest_cr, uvlinesize);
2366  }else{
2367  //chroma422
2368  dct_linesize = uvlinesize << s->interlaced_dct;
2369  dct_offset = s->interlaced_dct ? uvlinesize : uvlinesize * 8;
2370 
2371  add_dct(s, block[4], 4, dest_cb, dct_linesize);
2372  add_dct(s, block[5], 5, dest_cr, dct_linesize);
2373  add_dct(s, block[6], 6, dest_cb+dct_offset, dct_linesize);
2374  add_dct(s, block[7], 7, dest_cr+dct_offset, dct_linesize);
2375  if(!s->chroma_x_shift){//Chroma444
2376  add_dct(s, block[8], 8, dest_cb+8, dct_linesize);
2377  add_dct(s, block[9], 9, dest_cr+8, dct_linesize);
2378  add_dct(s, block[10], 10, dest_cb+8+dct_offset, dct_linesize);
2379  add_dct(s, block[11], 11, dest_cr+8+dct_offset, dct_linesize);
2380  }
2381  }
2382  }//fi gray
2383  }
2385  ff_wmv2_add_mb(s, block, dest_y, dest_cb, dest_cr);
2386  }
2387  } else {
2388  /* dct only in intra block */
2390  put_dct(s, block[0], 0, dest_y , dct_linesize, s->qscale);
2391  put_dct(s, block[1], 1, dest_y + block_size, dct_linesize, s->qscale);
2392  put_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize, s->qscale);
2393  put_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize, s->qscale);
2394 
2395  if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
2396  if(s->chroma_y_shift){
2397  put_dct(s, block[4], 4, dest_cb, uvlinesize, s->chroma_qscale);
2398  put_dct(s, block[5], 5, dest_cr, uvlinesize, s->chroma_qscale);
2399  }else{
2400  dct_offset >>=1;
2401  dct_linesize >>=1;
2402  put_dct(s, block[4], 4, dest_cb, dct_linesize, s->chroma_qscale);
2403  put_dct(s, block[5], 5, dest_cr, dct_linesize, s->chroma_qscale);
2404  put_dct(s, block[6], 6, dest_cb + dct_offset, dct_linesize, s->chroma_qscale);
2405  put_dct(s, block[7], 7, dest_cr + dct_offset, dct_linesize, s->chroma_qscale);
2406  }
2407  }
2408  }else{
2409  s->dsp.idct_put(dest_y , dct_linesize, block[0]);
2410  s->dsp.idct_put(dest_y + block_size, dct_linesize, block[1]);
2411  s->dsp.idct_put(dest_y + dct_offset , dct_linesize, block[2]);
2412  s->dsp.idct_put(dest_y + dct_offset + block_size, dct_linesize, block[3]);
2413 
2414  if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
2415  if(s->chroma_y_shift){
2416  s->dsp.idct_put(dest_cb, uvlinesize, block[4]);
2417  s->dsp.idct_put(dest_cr, uvlinesize, block[5]);
2418  }else{
2419 
2420  dct_linesize = uvlinesize << s->interlaced_dct;
2421  dct_offset = s->interlaced_dct ? uvlinesize : uvlinesize * 8;
2422 
2423  s->dsp.idct_put(dest_cb, dct_linesize, block[4]);
2424  s->dsp.idct_put(dest_cr, dct_linesize, block[5]);
2425  s->dsp.idct_put(dest_cb + dct_offset, dct_linesize, block[6]);
2426  s->dsp.idct_put(dest_cr + dct_offset, dct_linesize, block[7]);
2427  if(!s->chroma_x_shift){//Chroma444
2428  s->dsp.idct_put(dest_cb + 8, dct_linesize, block[8]);
2429  s->dsp.idct_put(dest_cr + 8, dct_linesize, block[9]);
2430  s->dsp.idct_put(dest_cb + 8 + dct_offset, dct_linesize, block[10]);
2431  s->dsp.idct_put(dest_cr + 8 + dct_offset, dct_linesize, block[11]);
2432  }
2433  }
2434  }//gray
2435  }
2436  }
2437 skip_idct:
2438  if(!readable){
2439  s->dsp.put_pixels_tab[0][0](s->dest[0], dest_y , linesize,16);
2440  s->dsp.put_pixels_tab[s->chroma_x_shift][0](s->dest[1], dest_cb, uvlinesize,16 >> s->chroma_y_shift);
2441  s->dsp.put_pixels_tab[s->chroma_x_shift][0](s->dest[2], dest_cr, uvlinesize,16 >> s->chroma_y_shift);
2442  }
2443  }
2444 }
2445 
2447 #if !CONFIG_SMALL
2448  if(s->out_format == FMT_MPEG1) {
2449  MPV_decode_mb_internal(s, block, 1);
2450  } else
2451 #endif
2452  MPV_decode_mb_internal(s, block, 0);
2453 }
2454 
2459  Picture *last, int y, int h, int picture_structure,
2460  int first_field, int draw_edges, int low_delay,
2461  int v_edge_pos, int h_edge_pos)
2462 {
2463  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
2464  int hshift = desc->log2_chroma_w;
2465  int vshift = desc->log2_chroma_h;
2466  const int field_pic = picture_structure != PICT_FRAME;
2467  if(field_pic){
2468  h <<= 1;
2469  y <<= 1;
2470  }
2471 
2472  if (!avctx->hwaccel &&
2474  draw_edges &&
2475  cur->f.reference &&
2476  !(avctx->flags & CODEC_FLAG_EMU_EDGE)) {
2477  int *linesize = cur->f.linesize;
2478  int sides = 0, edge_h;
2479  if (y==0) sides |= EDGE_TOP;
2480  if (y + h >= v_edge_pos)
2481  sides |= EDGE_BOTTOM;
2482 
2483  edge_h= FFMIN(h, v_edge_pos - y);
2484 
2485  dsp->draw_edges(cur->f.data[0] + y * linesize[0],
2486  linesize[0], h_edge_pos, edge_h,
2487  EDGE_WIDTH, EDGE_WIDTH, sides);
2488  dsp->draw_edges(cur->f.data[1] + (y >> vshift) * linesize[1],
2489  linesize[1], h_edge_pos >> hshift, edge_h >> vshift,
2490  EDGE_WIDTH >> hshift, EDGE_WIDTH >> vshift, sides);
2491  dsp->draw_edges(cur->f.data[2] + (y >> vshift) * linesize[2],
2492  linesize[2], h_edge_pos >> hshift, edge_h >> vshift,
2493  EDGE_WIDTH >> hshift, EDGE_WIDTH >> vshift, sides);
2494  }
2495 
2496  h = FFMIN(h, avctx->height - y);
2497 
2498  if(field_pic && first_field && !(avctx->slice_flags&SLICE_FLAG_ALLOW_FIELD)) return;
2499 
2500  if (avctx->draw_horiz_band) {
2501  AVFrame *src;
2502  int offset[AV_NUM_DATA_POINTERS];
2503  int i;
2504 
2505  if(cur->f.pict_type == AV_PICTURE_TYPE_B || low_delay ||
2507  src = &cur->f;
2508  else if (last)
2509  src = &last->f;
2510  else
2511  return;
2512 
2513  if (cur->f.pict_type == AV_PICTURE_TYPE_B &&
2514  picture_structure == PICT_FRAME &&
2515  avctx->codec_id != AV_CODEC_ID_H264 &&
2516  avctx->codec_id != AV_CODEC_ID_SVQ3) {
2517  for (i = 0; i < AV_NUM_DATA_POINTERS; i++)
2518  offset[i] = 0;
2519  }else{
2520  offset[0]= y * src->linesize[0];
2521  offset[1]=
2522  offset[2]= (y >> vshift) * src->linesize[1];
2523  for (i = 3; i < AV_NUM_DATA_POINTERS; i++)
2524  offset[i] = 0;
2525  }
2526 
2527  emms_c();
2528 
2529  avctx->draw_horiz_band(avctx, src, offset,
2530  y, picture_structure, h);
2531  }
2532 }
2533 
2535 {
2536  int draw_edges = s->unrestricted_mv && !s->intra_only;
2538  &s->last_picture, y, h, s->picture_structure,
2539  s->first_field, draw_edges, s->low_delay,
2540  s->v_edge_pos, s->h_edge_pos);
2541 }
2542 
2543 void ff_init_block_index(MpegEncContext *s){ //FIXME maybe rename
2544  const int linesize = s->current_picture.f.linesize[0]; //not s->linesize as this would be wrong for field pics
2545  const int uvlinesize = s->current_picture.f.linesize[1];
2546  const int mb_size= 4;
2547 
2548  s->block_index[0]= s->b8_stride*(s->mb_y*2 ) - 2 + s->mb_x*2;
2549  s->block_index[1]= s->b8_stride*(s->mb_y*2 ) - 1 + s->mb_x*2;
2550  s->block_index[2]= s->b8_stride*(s->mb_y*2 + 1) - 2 + s->mb_x*2;
2551  s->block_index[3]= s->b8_stride*(s->mb_y*2 + 1) - 1 + s->mb_x*2;
2552  s->block_index[4]= s->mb_stride*(s->mb_y + 1) + s->b8_stride*s->mb_height*2 + s->mb_x - 1;
2553  s->block_index[5]= s->mb_stride*(s->mb_y + s->mb_height + 2) + s->b8_stride*s->mb_height*2 + s->mb_x - 1;
2554  //block_index is not used by mpeg2, so it is not affected by chroma_format
2555 
2556  s->dest[0] = s->current_picture.f.data[0] + ((s->mb_x - 1) << mb_size);
2557  s->dest[1] = s->current_picture.f.data[1] + ((s->mb_x - 1) << (mb_size - s->chroma_x_shift));
2558  s->dest[2] = s->current_picture.f.data[2] + ((s->mb_x - 1) << (mb_size - s->chroma_x_shift));
2559 
2561  {
2562  if(s->picture_structure==PICT_FRAME){
2563  s->dest[0] += s->mb_y * linesize << mb_size;
2564  s->dest[1] += s->mb_y * uvlinesize << (mb_size - s->chroma_y_shift);
2565  s->dest[2] += s->mb_y * uvlinesize << (mb_size - s->chroma_y_shift);
2566  }else{
2567  s->dest[0] += (s->mb_y>>1) * linesize << mb_size;
2568  s->dest[1] += (s->mb_y>>1) * uvlinesize << (mb_size - s->chroma_y_shift);
2569  s->dest[2] += (s->mb_y>>1) * uvlinesize << (mb_size - s->chroma_y_shift);
2570  assert((s->mb_y&1) == (s->picture_structure == PICT_BOTTOM_FIELD));
2571  }
2572  }
2573 }
2574 
2576  int i;
2577  MpegEncContext *s = avctx->priv_data;
2578 
2579  if(s==NULL || s->picture==NULL)
2580  return;
2581 
2582  for(i=0; i<s->picture_count; i++){
2583  if (s->picture[i].f.data[0] &&
2584  (s->picture[i].f.type == FF_BUFFER_TYPE_INTERNAL ||
2585  s->picture[i].f.type == FF_BUFFER_TYPE_USER))
2586  free_frame_buffer(s, &s->picture[i]);
2587  }
2589 
2590  s->mb_x= s->mb_y= 0;
2591 
2592  s->parse_context.state= -1;
2594  s->parse_context.overread= 0;
2596  s->parse_context.index= 0;
2597  s->parse_context.last_index= 0;
2598  s->bitstream_buffer_size=0;
2599  s->pp_time=0;
2600 }
2601 
2603  DCTELEM *block, int n, int qscale)
2604 {
2605  int i, level, nCoeffs;
2606  const uint16_t *quant_matrix;
2607 
2608  nCoeffs= s->block_last_index[n];
2609 
2610  if (n < 4)
2611  block[0] = block[0] * s->y_dc_scale;
2612  else
2613  block[0] = block[0] * s->c_dc_scale;
2614  /* XXX: only mpeg1 */
2615  quant_matrix = s->intra_matrix;
2616  for(i=1;i<=nCoeffs;i++) {
2617  int j= s->intra_scantable.permutated[i];
2618  level = block[j];
2619  if (level) {
2620  if (level < 0) {
2621  level = -level;
2622  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2623  level = (level - 1) | 1;
2624  level = -level;
2625  } else {
2626  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2627  level = (level - 1) | 1;
2628  }
2629  block[j] = level;
2630  }
2631  }
2632 }
2633 
2635  DCTELEM *block, int n, int qscale)
2636 {
2637  int i, level, nCoeffs;
2638  const uint16_t *quant_matrix;
2639 
2640  nCoeffs= s->block_last_index[n];
2641 
2642  quant_matrix = s->inter_matrix;
2643  for(i=0; i<=nCoeffs; i++) {
2644  int j= s->intra_scantable.permutated[i];
2645  level = block[j];
2646  if (level) {
2647  if (level < 0) {
2648  level = -level;
2649  level = (((level << 1) + 1) * qscale *
2650  ((int) (quant_matrix[j]))) >> 4;
2651  level = (level - 1) | 1;
2652  level = -level;
2653  } else {
2654  level = (((level << 1) + 1) * qscale *
2655  ((int) (quant_matrix[j]))) >> 4;
2656  level = (level - 1) | 1;
2657  }
2658  block[j] = level;
2659  }
2660  }
2661 }
2662 
2664  DCTELEM *block, int n, int qscale)
2665 {
2666  int i, level, nCoeffs;
2667  const uint16_t *quant_matrix;
2668 
2669  if(s->alternate_scan) nCoeffs= 63;
2670  else nCoeffs= s->block_last_index[n];
2671 
2672  if (n < 4)
2673  block[0] = block[0] * s->y_dc_scale;
2674  else
2675  block[0] = block[0] * s->c_dc_scale;
2676  quant_matrix = s->intra_matrix;
2677  for(i=1;i<=nCoeffs;i++) {
2678  int j= s->intra_scantable.permutated[i];
2679  level = block[j];
2680  if (level) {
2681  if (level < 0) {
2682  level = -level;
2683  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2684  level = -level;
2685  } else {
2686  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2687  }
2688  block[j] = level;
2689  }
2690  }
2691 }
2692 
2694  DCTELEM *block, int n, int qscale)
2695 {
2696  int i, level, nCoeffs;
2697  const uint16_t *quant_matrix;
2698  int sum=-1;
2699 
2700  if(s->alternate_scan) nCoeffs= 63;
2701  else nCoeffs= s->block_last_index[n];
2702 
2703  if (n < 4)
2704  block[0] = block[0] * s->y_dc_scale;
2705  else
2706  block[0] = block[0] * s->c_dc_scale;
2707  quant_matrix = s->intra_matrix;
2708  for(i=1;i<=nCoeffs;i++) {
2709  int j= s->intra_scantable.permutated[i];
2710  level = block[j];
2711  if (level) {
2712  if (level < 0) {
2713  level = -level;
2714  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2715  level = -level;
2716  } else {
2717  level = (int)(level * qscale * quant_matrix[j]) >> 3;
2718  }
2719  block[j] = level;
2720  sum+=level;
2721  }
2722  }
2723  block[63]^=sum&1;
2724 }
2725 
2727  DCTELEM *block, int n, int qscale)
2728 {
2729  int i, level, nCoeffs;
2730  const uint16_t *quant_matrix;
2731  int sum=-1;
2732 
2733  if(s->alternate_scan) nCoeffs= 63;
2734  else nCoeffs= s->block_last_index[n];
2735 
2736  quant_matrix = s->inter_matrix;
2737  for(i=0; i<=nCoeffs; i++) {
2738  int j= s->intra_scantable.permutated[i];
2739  level = block[j];
2740  if (level) {
2741  if (level < 0) {
2742  level = -level;
2743  level = (((level << 1) + 1) * qscale *
2744  ((int) (quant_matrix[j]))) >> 4;
2745  level = -level;
2746  } else {
2747  level = (((level << 1) + 1) * qscale *
2748  ((int) (quant_matrix[j]))) >> 4;
2749  }
2750  block[j] = level;
2751  sum+=level;
2752  }
2753  }
2754  block[63]^=sum&1;
2755 }
2756 
2758  DCTELEM *block, int n, int qscale)
2759 {
2760  int i, level, qmul, qadd;
2761  int nCoeffs;
2762 
2763  assert(s->block_last_index[n]>=0);
2764 
2765  qmul = qscale << 1;
2766 
2767  if (!s->h263_aic) {
2768  if (n < 4)
2769  block[0] = block[0] * s->y_dc_scale;
2770  else
2771  block[0] = block[0] * s->c_dc_scale;
2772  qadd = (qscale - 1) | 1;
2773  }else{
2774  qadd = 0;
2775  }
2776  if(s->ac_pred)
2777  nCoeffs=63;
2778  else
2779  nCoeffs= s->inter_scantable.raster_end[ s->block_last_index[n] ];
2780 
2781  for(i=1; i<=nCoeffs; i++) {
2782  level = block[i];
2783  if (level) {
2784  if (level < 0) {
2785  level = level * qmul - qadd;
2786  } else {
2787  level = level * qmul + qadd;
2788  }
2789  block[i] = level;
2790  }
2791  }
2792 }
2793 
2795  DCTELEM *block, int n, int qscale)
2796 {
2797  int i, level, qmul, qadd;
2798  int nCoeffs;
2799 
2800  assert(s->block_last_index[n]>=0);
2801 
2802  qadd = (qscale - 1) | 1;
2803  qmul = qscale << 1;
2804 
2805  nCoeffs= s->inter_scantable.raster_end[ s->block_last_index[n] ];
2806 
2807  for(i=0; i<=nCoeffs; i++) {
2808  level = block[i];
2809  if (level) {
2810  if (level < 0) {
2811  level = level * qmul - qadd;
2812  } else {
2813  level = level * qmul + qadd;
2814  }
2815  block[i] = level;
2816  }
2817  }
2818 }
2819 
2823 void ff_set_qscale(MpegEncContext * s, int qscale)
2824 {
2825  if (qscale < 1)
2826  qscale = 1;
2827  else if (qscale > 31)
2828  qscale = 31;
2829 
2830  s->qscale = qscale;
2831  s->chroma_qscale= s->chroma_qscale_table[qscale];
2832 
2833  s->y_dc_scale= s->y_dc_scale_table[ qscale ];
2835 }
2836 
2838 {
2841 }
2842 
2844 {
2845  ERContext *er = &s->er;
2846 
2847  er->cur_pic = s->current_picture_ptr;
2848  er->last_pic = s->last_picture_ptr;
2849  er->next_pic = s->next_picture_ptr;
2850 
2851  er->pp_time = s->pp_time;
2852  er->pb_time = s->pb_time;
2853  er->quarter_sample = s->quarter_sample;
2855 
2856  ff_er_frame_start(er);
2857 }
int bitstream_buffer_size
Definition: mpegvideo.h:577
uint8_t * scratchpad
data area for the ME algo, so that the ME does not need to malloc/free
Definition: mpegvideo.h:161
#define PICT_BOTTOM_FIELD
Definition: mpegvideo.h:628
enum AVPixelFormat ff_hwaccel_pixfmt_list_420[]
Definition: mpegvideo.c:133
int last
number of values for last = 0
Definition: rl.h:40
const struct AVCodec * codec
Definition: avcodec.h:1348
int16_t(* b_bidir_back_mv_table_base)[2]
Definition: mpegvideo.h:370
void * av_malloc(size_t size)
Allocate a block of size bytes with alignment suitable for all memory accesses (including vectors if ...
Definition: mem.c:61
int table_size
Definition: get_bits.h:66
#define PICT_TOP_FIELD
Definition: mpegvideo.h:627
discard all frames except keyframes
Definition: avcodec.h:535
void ff_init_block_index(MpegEncContext *s)
Definition: mpegvideo.c:2543
op_pixels_func put_pixels_tab[4][4]
Halfpel motion compensation with rounding (a+b+1)>>1.
Definition: dsputil.h:259
unsigned int stream_codec_tag
fourcc from the AVI stream header (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + ...
Definition: avcodec.h:1373
av_cold void ff_dsputil_init(DSPContext *c, AVCodecContext *avctx)
Definition: dsputil.c:2656
int picture_number
Definition: mpegvideo.h:246
#define MAX_PICTURE_COUNT
Definition: mpegvideo.h:64
ScanTable intra_v_scantable
Definition: mpegvideo.h:269
#define HAVE_THREADS
Definition: config.h:236
S(GMC)-VOP MPEG4.
Definition: avutil.h:248
const uint8_t ff_zigzag_direct[64]
Definition: dsputil.c:59
void ff_release_unused_pictures(MpegEncContext *s, int remove_current)
Definition: mpegvideo.c:1342
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:1435
int time_increment_bits
number of bits to represent the fractional part of time
Definition: mpegvideo.h:521
This structure describes decoded (raw) audio or video data.
Definition: avcodec.h:989
#define IS_SKIP(a)
Definition: mpegvideo.h:111
int qstride
QP store stride.
Definition: avcodec.h:1145
AVPanScan * pan_scan
Pan scan.
Definition: avcodec.h:1260
int ff_alloc_picture(MpegEncContext *s, Picture *pic, int shared)
Allocate a Picture.
Definition: mpegvideo.c:359
int16_t(* p_mv_table)[2]
MV table (1MV per MB) p-frame encoding.
Definition: mpegvideo.h:374
uint8_t * rd_scratchpad
scratchpad for rate distortion mb decision
Definition: mpegvideo.h:339
static void dct_unquantize_mpeg2_intra_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2663
int start_mb_y
start mb_y of this thread (so current thread should process start_mb_y <= row < end_mb_y) ...
Definition: mpegvideo.h:287
const uint8_t * y_dc_scale_table
qscale -> y_dc_scale table
Definition: mpegvideo.h:325
uint8_t * mb_mean
Table for MB luminance.
Definition: mpegvideo.h:146
int coded_width
Bitstream width / height, may be different from width/height.
Definition: avcodec.h:1515
av_cold int ff_dct_common_init(MpegEncContext *s)
Definition: mpegvideo.c:208
void(* dct_unquantize_mpeg2_inter)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:676
av_cold int ff_MPV_common_init(MpegEncContext *s)
init common structure for both encoder and decoder.
Definition: mpegvideo.c:941
void ff_MPV_common_init_arm(MpegEncContext *s)
Definition: mpegvideo_arm.c:42
misc image utilities
void ff_MPV_report_decode_progress(MpegEncContext *s)
Definition: mpegvideo.c:2837
uint8_t * coded_block_base
Definition: mpegvideo.h:328
#define EDGE_TOP
Definition: dsputil.h:441
AVFrame * coded_frame
the picture in the bitstream
Definition: avcodec.h:2725
int end_mb_y
end mb_y of this thread (so current thread should process start_mb_y <= row < end_mb_y) ...
Definition: mpegvideo.h:288
uint16_t * mb_var
Table for MB variances.
Definition: mpegvideo.h:144
static void free_frame_buffer(MpegEncContext *s, Picture *pic)
Release a frame buffer.
Definition: mpegvideo.c:259
void ff_MPV_motion(MpegEncContext *s, uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr, int dir, uint8_t **ref_picture, op_pixels_func(*pix_op)[4], qpel_mc_func(*qpix_op)[16])
int16_t(*[3] ac_val)[16]
used for for mpeg4 AC prediction, all 3 arrays must be continuous
Definition: mpegvideo.h:331
MJPEG encoder.
void(* idct_add)(uint8_t *dest, int line_size, DCTELEM *block)
block -> idct -> add dest -> clip to unsigned 8 bit -> dest.
Definition: dsputil.h:411
void * hwaccel_picture_private
hardware accelerator private data (Libav-allocated)
Definition: avcodec.h:1280
int v_edge_pos
horizontal / vertical position of the right/bottom edge (pixel replication)
Definition: mpegvideo.h:252
int msmpeg4_version
0=not msmpeg4, 1=mp41, 2=mp42, 3=mp43/divx3 4=wmv1/7 5=wmv2/8
Definition: mpegvideo.h:604
void(* draw_edges)(uint8_t *buf, int wrap, int width, int height, int w, int h, int sides)
Definition: dsputil.h:439
static const uint8_t mpeg2_dc_scale_table3[128]
Definition: mpegvideo.c:109
void ff_xvmc_field_end(MpegEncContext *s)
Complete frame/field rendering by passing any remaining blocks.
int needs_realloc
Picture needs to be reallocated (eg due to a frame size change)
Definition: mpegvideo.h:150
uint8_t * bitstream_buffer
Definition: mpegvideo.h:576
enum AVCodecID codec_id
Definition: mpegvideo.h:228
void(* dct_unquantize_h263_inter)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:680
void ff_init_rl(RLTable *rl, uint8_t static_store[2][2 *MAX_RUN+MAX_LEVEL+3])
Definition: mpegvideo.c:1250
int16_t(*[2] motion_val_base)[2]
Definition: mpegvideo.h:103
HW decoding through VA API, Picture.data[3] contains a vaapi_render_state struct which contains the b...
Definition: pixfmt.h:123
int field_picture
whether or not the picture was encoded in separate fields
Definition: mpegvideo.h:140
void(* dct_unquantize_mpeg2_intra)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:674
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:1533
int16_t(*[2][2] p_field_mv_table)[2]
MV table (2MV per MB) interlaced p-frame encoding.
Definition: mpegvideo.h:380
int picture_range_end
the part of picture that this context can allocate in
Definition: mpegvideo.h:320
static void dct_unquantize_h263_inter_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2794
int16_t(* p_mv_table_base)[2]
Definition: mpegvideo.h:366
uint8_t raster_end[64]
Definition: dsputil.h:184
#define wrap(func)
Definition: w64xmmtest.h:70
uint32_t * score_map
map to store the scores
Definition: mpegvideo.h:167
mpegvideo header.
av_dlog(ac->avr,"%d samples - audio_convert: %s to %s (%s)\n", len, av_get_sample_fmt_name(ac->in_fmt), av_get_sample_fmt_name(ac->out_fmt), use_generic?ac->func_descr_generic:ac->func_descr)
discard all
Definition: avcodec.h:536
uint8_t permutated[64]
Definition: dsputil.h:183
#define IS_INTRA4x4(a)
Definition: mpegvideo.h:106
const int8_t * table_level
Definition: rl.h:43
uint8_t run
Definition: svq3.c:132
static void free_duplicate_context(MpegEncContext *s)
Definition: mpegvideo.c:527
int bits_per_raw_sample
Bits per sample/pixel of internal libavcodec pixel/sample format.
Definition: avcodec.h:2711
int mb_num
number of MBs of a picture
Definition: mpegvideo.h:253
int stride
Definition: mace.c:144
int frame_start_found
Definition: parser.h:34
int ff_xvmc_field_start(MpegEncContext *s, AVCodecContext *avctx)
Find and store the surfaces that are used as reference frames.
static void free_picture(MpegEncContext *s, Picture *pic)
Deallocate a picture.
Definition: mpegvideo.c:448
int qscale
QP.
Definition: mpegvideo.h:343
RLTable.
Definition: rl.h:38
int h263_aic
Advanded INTRA Coding (AIC)
Definition: mpegvideo.h:263
int16_t(* b_back_mv_table)[2]
MV table (1MV per MB) backward mode b-frame encoding.
Definition: mpegvideo.h:376
uint8_t log2_chroma_w
Amount to shift the luma width right to find the chroma width.
Definition: pixdesc.h:66
int chroma_x_shift
Definition: mpegvideo.h:644
int encoding
true if we are encoding (vs decoding)
Definition: mpegvideo.h:230
void ff_MPV_common_init_bfin(MpegEncContext *s)
int block_wrap[6]
Definition: mpegvideo.h:435
static void dct_unquantize_mpeg1_intra_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2602
int16_t(* b_back_mv_table_base)[2]
Definition: mpegvideo.h:368
uint16_t pp_time
static void backup_duplicate_context(MpegEncContext *bak, MpegEncContext *src)
Definition: mpegvideo.c:547
#define USES_LIST(a, list)
does this mb use listX, note does not work if subMBs
Definition: mpegvideo.h:127
void ff_clean_intra_table_entries(MpegEncContext *s)
Clean dc, ac, coded_block for the current non-intra MB.
Definition: mpegvideo.c:2174
int picture_range_start
Definition: mpegvideo.h:320
#define COLOR(theta, r)
void av_freep(void *arg)
Free a memory block which has been allocated with av_malloc(z)() or av_realloc() and set the pointer ...
Definition: mem.c:151
#define CONFIG_GRAY
Definition: config.h:277
Switching Intra.
Definition: avutil.h:249
#define MAX_THREADS
Definition: mpegvideo.h:62
uint8_t * visualization_buffer[3]
temporary buffer vor MV visualization
Definition: mpegvideo.h:321
struct AVHWAccel * hwaccel
Hardware accelerator in use.
Definition: avcodec.h:2622
#define CONFIG_WMV2_DECODER
Definition: config.h:525
int picture_in_gop_number
0-> first pic in gop, ...
Definition: mpegvideo.h:247
void av_fast_malloc(void *ptr, unsigned int *size, size_t min_size)
Allocate a buffer, reusing the given one if large enough.
Definition: utils.c:72
void ff_mpeg_draw_horiz_band(MpegEncContext *s, int y, int h)
Definition: mpegvideo.c:2534
int8_t * max_run[2]
encoding & decoding
Definition: rl.h:46
void ff_MPV_common_init_altivec(MpegEncContext *s)
int context_reinit
Definition: mpegvideo.h:702
const uint8_t ff_alternate_vertical_scan[64]
Definition: dsputil.c:97
int16_t * dc_val_base
Definition: mpegvideo.h:323
#define CONFIG_WMV2_ENCODER
Definition: config.h:885
int ff_MPV_common_frame_size_change(MpegEncContext *s)
Definition: mpegvideo.c:1128
uint8_t
DCTELEM(*[12] pblocks)[64]
Definition: mpegvideo.h:660
#define IS_8X16(a)
Definition: mpegvideo.h:118
Picture ** input_picture
next pictures on display order for encoding
Definition: mpegvideo.h:257
#define PICT_FRAME
Definition: mpegvideo.h:629
enum OutputFormat out_format
output format
Definition: mpegvideo.h:220
void(* qpel_mc_func)(uint8_t *dst, uint8_t *src, int stride)
Definition: dsputil.h:144
uint16_t(* dct_offset)[64]
Definition: mpegvideo.h:470
#define AV_RB32
Definition: intreadwrite.h:130
uint8_t * pred_dir_table
used to store pred_dir for partitioned decoding
Definition: mpegvideo.h:337
qpel_mc_func(* qpel_put)[16]
Definition: mpegvideo.h:199
static void dct_unquantize_h263_intra_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2757
void ff_mpeg_er_frame_start(MpegEncContext *s)
Definition: mpegvideo.c:2843
#define emms_c()
Definition: internal.h:145
uint8_t motion_subsample_log2
log2 of the size of the block which a single vector in motion_val represents: (4->16x16, 3->8x8, 2-> 4x4, 1-> 2x2)
Definition: avcodec.h:1302
#define IS_GMC(a)
Definition: mpegvideo.h:115
int no_rounding
apply no rounding to motion compensation (MPEG4, msmpeg4, ...) for b-frames rounding mode is always 0...
Definition: mpegvideo.h:408
int interlaced_dct
Definition: mpegvideo.h:649
Picture current_picture
copy of the current picture structure.
Definition: mpegvideo.h:314
int intra_dc_precision
Definition: mpegvideo.h:631
static int pic_is_unused(MpegEncContext *s, Picture *pic)
Definition: mpegvideo.c:1357
op_pixels_func avg_pixels_tab[4][4]
Halfpel motion compensation with rounding (a+b+1)>>1.
Definition: dsputil.h:271
int16_t(* b_bidir_forw_mv_table)[2]
MV table (1MV per MB) bidir mode b-frame encoding.
Definition: mpegvideo.h:377
void(* dct_unquantize_intra)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:686
void(* dct_unquantize_inter)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:688
float * cplx_tab
Definition: mpegvideo.h:698
int8_t * max_level[2]
encoding & decoding
Definition: rl.h:45
uint16_t pp_time
time distance between the last 2 p,s,i frames
Definition: mpegvideo.h:526
uint8_t idct_permutation[64]
idct input permutation.
Definition: dsputil.h:425
uint8_t * b_scratchpad
scratchpad used for writing into write only buffers
Definition: mpegvideo.h:341
static void mpeg_er_decode_mb(void *opaque, int ref, int mv_dir, int mv_type, int(*mv)[2][4][2], int mb_x, int mb_y, int mb_intra, int mb_skipped)
Definition: mpegvideo.c:147
int flags2
AVCodecContext.flags2.
Definition: mpegvideo.h:232
int interlaced_frame
The content of the picture is interlaced.
Definition: avcodec.h:1232
int mb_height
number of MBs horizontally & vertically
Definition: mpegvideo.h:248
enum AVPixelFormat ff_pixfmt_list_420[]
Definition: mpegvideo.c:128
void ff_MPV_frame_end(MpegEncContext *s)
Definition: mpegvideo.c:1640
int codec_tag
internal codec_tag upper case converted from avctx codec_tag
Definition: mpegvideo.h:238
char * stats_out
pass1 encoding statistics output buffer
Definition: avcodec.h:2502
void(* dct_unquantize_mpeg1_intra)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:670
int16_t(*[2][2] p_field_mv_table_base)[2]
Definition: mpegvideo.h:372
void * owner2
pointer to the context that allocated this picture
Definition: mpegvideo.h:149
static int free_context_frame(MpegEncContext *s)
Frees and resets MpegEncContext fields depending on the resolution.
Definition: mpegvideo.c:1075
static void update_noise_reduction(MpegEncContext *s)
Definition: mpegvideo.c:1404
#define MAX_LEVEL
Definition: rl.h:35
#define IS_INTERLACED(a)
Definition: mpegvideo.h:113
static void ff_update_block_index(MpegEncContext *s)
Definition: mpegvideo.h:812
void ff_set_qscale(MpegEncContext *s, int qscale)
set qscale and update qscale dependent variables.
Definition: mpegvideo.c:2823
int(* q_inter_matrix)[64]
Definition: mpegvideo.h:462
#define r
Definition: input.c:51
void ff_xvmc_decode_mb(MpegEncContext *s)
Synthesize the data needed by XvMC to render one macroblock of data.
int(* q_intra_matrix)[64]
precomputed matrix (combine qscale and DCT renorm)
Definition: mpegvideo.h:461
int intra_only
if true, only intra pictures are generated
Definition: mpegvideo.h:218
int16_t * dc_val[3]
used for mpeg4 DC prediction, all 3 arrays must be continuous
Definition: mpegvideo.h:324
int h263_plus
h263 plus headers
Definition: mpegvideo.h:225
int slice_context_count
number of used thread_contexts
Definition: mpegvideo.h:290
int last_non_b_pict_type
used for mpeg4 gmc b-frames & ratecontrol
Definition: mpegvideo.h:352
unsigned int buffer_size
Definition: parser.h:32
int stream_codec_tag
internal stream_codec_tag upper case converted from avctx stream_codec_tag
Definition: mpegvideo.h:239
int last_dc[3]
last DC values for MPEG1
Definition: mpegvideo.h:322
uint8_t log2_chroma_h
Amount to shift the luma height right to find the chroma height.
Definition: pixdesc.h:75
Multithreading support functions.
int mb_skipped
MUST BE SET only during DECODING.
Definition: mpegvideo.h:332
int reference
is this picture used as reference The values for this are the same as the MpegEncContext.picture_structure variable, that is 1->top field, 2->bottom field, 3->frame/both fields.
Definition: avcodec.h:1132
int chroma_y_shift
Definition: mpegvideo.h:645
static int find_unused_picture(MpegEncContext *s, int shared)
Definition: mpegvideo.c:1367
int partitioned_frame
is current frame partitioned
Definition: mpegvideo.h:555
int is_copy
Whether the parent AVCodecContext is a copy of the context which had init() called on it...
Definition: internal.h:63
short * dct_coeff
DCT coefficients.
Definition: avcodec.h:1187
const uint8_t ff_alternate_horizontal_scan[64]
Definition: dsputil.c:86
void(* idct_put)(uint8_t *dest, int line_size, DCTELEM *block)
block -> idct -> clip to unsigned 8 bit -> dest.
Definition: dsputil.h:405
int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor.
Definition: pixdesc.c:1460
int unrestricted_mv
mv can point outside of the coded picture
Definition: mpegvideo.h:359
ERContext er
Definition: mpegvideo.h:704
int active_thread_type
Which multithreading methods are in use by the codec.
Definition: avcodec.h:2752
int last_lambda_for[5]
last lambda for a specific pict type
Definition: mpegvideo.h:355
int capabilities
Codec capabilities.
Definition: avcodec.h:2979
uint8_t * edge_emu_buffer
temporary buffer for if MVs point to out-of-frame data
Definition: mpegvideo.h:338
uint8_t * base[AV_NUM_DATA_POINTERS]
pointer to the first allocated byte of the picture.
Definition: avcodec.h:1073
void(* decode_mb)(void *opaque, int ref, int mv_dir, int mv_type, int(*mv)[2][4][2], int mb_x, int mb_y, int mb_intra, int mb_skipped)
int flags
CODEC_FLAG_*.
Definition: avcodec.h:1434
struct Picture * next_pic
static enum AVDiscard skip_idct
Definition: avplay.c:258
void ff_videodsp_init(VideoDSPContext *ctx, int bpc)
Definition: videodsp.c:36
int overread_index
the index into ParseContext.buffer of the overread bytes
Definition: parser.h:36
void av_log(void *avcl, int level, const char *fmt,...)
Definition: log.c:146
int quarter_sample
1->qpel, 0->half pel ME/MC
Definition: mpegvideo.h:545
uint16_t * mb_type
Table for candidate MB types for encoding.
Definition: mpegvideo.h:415
#define IS_INTRA(a)
Definition: mpegvideo.h:109
static void draw_line(uint8_t *buf, int sx, int sy, int ex, int ey, int w, int h, int stride, int color)
Draw a line from (ex, ey) -> (sx, sy).
Definition: mpegvideo.c:1718
int low_delay
no reordering needed / has no b-frames
Definition: mpegvideo.h:558
op_pixels_func put_no_rnd_pixels_tab[4][4]
Halfpel motion compensation with no rounding (a+b)>>1.
Definition: dsputil.h:283
static void dct_unquantize_mpeg2_intra_bitexact(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2693
VLC vlc
decoding only deprecated FIXME remove
Definition: rl.h:47
uint8_t *[2][2] b_field_select_table
Definition: mpegvideo.h:383
int8_t len
Definition: get_bits.h:71
uint8_t * mbintra_table
int * mb_index2xy
int priv_data_size
Size of HW accelerator private data.
Definition: avcodec.h:3139
int off
Definition: dsputil_bfin.c:28
DCTELEM(* blocks)[8][64]
Definition: mpegvideo.h:663
int picture_count
number of allocated pictures (MAX_PICTURE_COUNT * avctx->thread_count)
Definition: mpegvideo.h:319
static const uint8_t ff_default_chroma_qscale_table[32]
Definition: mpegvideo.c:67
void ff_mpeg_flush(AVCodecContext *avctx)
Definition: mpegvideo.c:2575
int coded_picture_number
used to set pic->coded_picture_number, should not be used for/by anything else
Definition: mpegvideo.h:245
int * lambda_table
Definition: mpegvideo.h:347
uint8_t * error_status_table
int n
number of entries of table_vlc minus 1
Definition: rl.h:39
#define IS_8X8(a)
Definition: mpegvideo.h:119
int av_image_check_size(unsigned int w, unsigned int h, int log_offset, void *log_ctx)
Check if the given dimension of an image is valid, meaning that all bytes of the image can be address...
Definition: imgutils.c:220
int err_recognition
Definition: mpegvideo.h:498
void(* dct_unquantize_mpeg1_inter)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:672
void(* draw_horiz_band)(struct AVCodecContext *s, const AVFrame *src, int offset[AV_NUM_DATA_POINTERS], int y, int type, int height)
If non NULL, 'draw_horiz_band' is called by the libavcodec decoder to draw a horizontal band...
Definition: avcodec.h:1567
int progressive_frame
Definition: mpegvideo.h:647
static DCTELEM block[64]
Definition: dct-test.c:169
enum AVPictureType pict_type
Picture type of the frame, see ?_TYPE below.
Definition: avcodec.h:1065
struct Picture * last_pic
int top_field_first
Definition: mpegvideo.h:633
int err_recognition
Error recognition; may misdetect some more or less valid parts as errors.
Definition: avcodec.h:2602
uint8_t * er_temp_buffer
int overread
the number of bytes which where irreversibly read from the next frame
Definition: parser.h:35
uint16_t(* q_inter_matrix16)[2][64]
Definition: mpegvideo.h:465
int last_index
Definition: parser.h:31
int next_p_frame_damaged
set if the next p frame is damaged, to avoid showing trashed b frames
Definition: mpegvideo.h:497
int width
picture width / height.
Definition: avcodec.h:1508
uint8_t * mbskip_table
used to avoid copy if macroblock skipped (for black regions for example) and used for b-frame encodin...
Definition: mpegvideo.h:333
int type
type of the buffer (to keep track of who has to deallocate data[*])
Definition: avcodec.h:1217
Picture * current_picture_ptr
pointer to the current picture
Definition: mpegvideo.h:318
void ff_copy_picture(Picture *dst, Picture *src)
Definition: mpegvideo.c:250
Picture.
Definition: mpegvideo.h:95
int alternate_scan
Definition: mpegvideo.h:637
unsigned int allocated_bitstream_buffer_size
Definition: mpegvideo.h:578
uint16_t pb_time
int16_t(* ac_val_base)[16]
Definition: mpegvideo.h:330
int32_t
DSPContext * dsp
uint16_t(* q_intra_matrix16)[2][64]
identical to the above but for MMX & these are not permutated, second 64 entries are bias ...
Definition: mpegvideo.h:464
const int8_t * table_run
Definition: rl.h:42
static av_always_inline void MPV_decode_mb_internal(MpegEncContext *s, DCTELEM block[12][64], int is_mpeg12)
Definition: mpegvideo.c:2215
int16_t(*[2][2][2] b_field_mv_table_base)[2]
Definition: mpegvideo.h:373
int quality
quality (between 1 (good) and FF_LAMBDA_MAX (bad))
Definition: avcodec.h:1122
int(* ac_stats)[2][MAX_LEVEL+1][MAX_RUN+1][2]
[mb_intra][isChroma][level][run][last]
Definition: mpegvideo.h:609
int16_t(* b_forw_mv_table_base)[2]
Definition: mpegvideo.h:367
int block_last_index[12]
last non zero coefficient in block
Definition: mpegvideo.h:262
MotionEstContext me
Definition: mpegvideo.h:406
int ff_mpv_frame_size_alloc(MpegEncContext *s, int linesize)
Definition: mpegvideo.c:272
#define EDGE_BOTTOM
Definition: dsputil.h:442
int mb_decision
macroblock decision mode
Definition: avcodec.h:1882
uint8_t * mbintra_table
used to avoid setting {ac, dc, cbp}-pred stuff to zero on inter MB decoding
Definition: mpegvideo.h:335
#define ME_MAP_SIZE
Definition: mpegvideo.h:66
#define DELAYED_PIC_REF
Value of Picture.reference when Picture is not a reference picture, but is held for delayed output...
Definition: mpegvideo.h:88
int ff_MPV_frame_start(MpegEncContext *s, AVCodecContext *avctx)
generic function for encode/decode called after coding/decoding the header and before a frame is code...
Definition: mpegvideo.c:1429
void ff_draw_horiz_band(AVCodecContext *avctx, DSPContext *dsp, Picture *cur, Picture *last, int y, int h, int picture_structure, int first_field, int draw_edges, int low_delay, int v_edge_pos, int h_edge_pos)
Definition: mpegvideo.c:2458
int ff_mpeg_update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
Definition: mpegvideo.c:597
RL_VLC_ELEM * rl_vlc[32]
decoding only
Definition: rl.h:48
preferred ID for MPEG-1/2 video decoding
Definition: avcodec.h:100
int thread_count
thread count is used to decide how many independent tasks should be passed to execute() ...
Definition: avcodec.h:2733
int block_index[6]
index to current MB in block based arrays with edges
Definition: mpegvideo.h:434
#define IS_16X8(a)
Definition: mpegvideo.h:117
int xvmc_acceleration
XVideo Motion Acceleration.
Definition: avcodec.h:1875
int * mb_index2xy
mb_index -> mb_x + mb_y*mb_stride
Definition: mpegvideo.h:438
int first_field
is 1 for the first field of a field picture 0 otherwise
Definition: mpegvideo.h:651
void(* op_pixels_func)(uint8_t *block, const uint8_t *pixels, int line_size, int h)
Definition: dsputil.h:142
static const int8_t mv[256][2]
Definition: 4xm.c:73
uint32_t * mb_type
macroblock type table mb_type_base + mb_width + 2
Definition: avcodec.h:1180
#define MV_TYPE_16X16
1 vector for the whole mb
Definition: mpegvideo.h:390
NULL
Definition: eval.c:52
uint16_t * mc_mb_var
Table for motion compensated MB variances.
Definition: mpegvideo.h:145
#define MV_DIR_BACKWARD
Definition: mpegvideo.h:387
struct Picture * cur_pic
int16_t(* b_bidir_forw_mv_table_base)[2]
Definition: mpegvideo.h:369
const uint8_t *const ff_mpeg2_dc_scale_table[4]
Definition: mpegvideo.c:121
static int width
Definition: utils.c:156
int coded_picture_number
picture number in bitstream order
Definition: avcodec.h:1109
const uint8_t * avpriv_mpv_find_start_code(const uint8_t *restrict p, const uint8_t *end, uint32_t *restrict state)
Definition: mpegvideo.c:174
uint16_t inter_matrix[64]
Definition: mpegvideo.h:443
uint8_t * buffer
Definition: parser.h:29
struct MpegEncContext * thread_context[MAX_THREADS]
Definition: mpegvideo.h:289
external API header
void ff_thread_await_progress(AVFrame *f, int n, int field)
Wait for earlier decoding threads to finish reference pictures.
Definition: pthread.c:684
int8_t * qscale_table_base
Definition: mpegvideo.h:102
enum AVCodecID codec_id
Definition: avcodec.h:1350
static av_const unsigned int ff_sqrt(unsigned int a)
Definition: mathops.h:198
static void dct_unquantize_mpeg2_inter_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2726
enum AVDiscard skip_idct
Definition: avcodec.h:2900
uint32_t * mb_type_base
Definition: mpegvideo.h:104
int linesize[AV_NUM_DATA_POINTERS]
Size, in bytes, of the data for each picture/channel plane.
Definition: avcodec.h:1008
int debug
debug
Definition: avcodec.h:2568
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:55
main external API structure.
Definition: avcodec.h:1339
ScanTable intra_scantable
Definition: mpegvideo.h:267
uint8_t * coded_block
used for coded block pattern prediction (msmpeg4v3, wmv1)
Definition: mpegvideo.h:329
int height
picture size. must be a multiple of 16
Definition: mpegvideo.h:216
unsigned int codec_tag
fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A').
Definition: avcodec.h:1365
const uint8_t ff_mpeg1_dc_scale_table[128]
Definition: mpegvideo.c:73
int16_t(*[2] motion_val)[2]
motion vector table
Definition: avcodec.h:1172
#define MV_TYPE_16X8
2 vectors, one per 16x8 block
Definition: mpegvideo.h:392
void avcodec_default_free_buffers(AVCodecContext *s)
Definition: utils.c:1756
void avcodec_default_release_buffer(AVCodecContext *s, AVFrame *pic)
Definition: utils.c:489
uint32_t state
contains the last few bytes in MSB order
Definition: parser.h:33
Picture * picture
main picture buffer
Definition: mpegvideo.h:256
int progressive_sequence
Definition: mpegvideo.h:623
int slice_flags
slice flags
Definition: avcodec.h:1865
void avcodec_get_frame_defaults(AVFrame *frame)
Set the fields of the given AVFrame to default values.
Definition: utils.c:604
int coded_height
Definition: avcodec.h:1515
Switching Predicted.
Definition: avutil.h:250
ScanTable intra_h_scantable
Definition: mpegvideo.h:268
int16_t(*[2][2][2] b_field_mv_table)[2]
MV table (4MV per MB) interlaced b-frame encoding.
Definition: mpegvideo.h:381
uint8_t * cbp_table
used to store cbp, ac_pred for partitioned decoding
Definition: mpegvideo.h:336
unsigned int avpriv_toupper4(unsigned int x)
Definition: utils.c:2097
uint8_t * index_run[2]
encoding only
Definition: rl.h:44
int context_initialized
Definition: mpegvideo.h:243
int input_picture_number
used to set pic->display_picture_number, should not be used for/by anything else
Definition: mpegvideo.h:244
void ff_MPV_common_init_x86(MpegEncContext *s)
Definition: mpegvideo.c:587
int8_t * ref_index[2]
motion reference frame index the order in which these are stored can depend on the codec...
Definition: avcodec.h:1195
DSPContext dsp
pointers for accelerated dsp functions
Definition: mpegvideo.h:362
#define s1
Definition: regdef.h:38
int f_code
forward MV resolution
Definition: mpegvideo.h:364
#define COPY(a)
short DCTELEM
Definition: dsputil.h:39
AVCodecContext * avctx
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer. ...
Definition: pixfmt.h:132
#define MV_DIR_FORWARD
Definition: mpegvideo.h:386
int max_b_frames
max number of b-frames for encoding
Definition: mpegvideo.h:233
int pict_type
AV_PICTURE_TYPE_I, AV_PICTURE_TYPE_P, AV_PICTURE_TYPE_B, ...
Definition: mpegvideo.h:350
DCTELEM(* block)[64]
points to one of the following blocks
Definition: mpegvideo.h:662
int h263_pred
use mpeg4/h263 ac/dc predictions
Definition: mpegvideo.h:221
int16_t(* b_bidir_back_mv_table)[2]
MV table (1MV per MB) bidir mode b-frame encoding.
Definition: mpegvideo.h:378
static int init_context_frame(MpegEncContext *s)
Initialize and allocates MpegEncContext fields dependent on the resolution.
Definition: mpegvideo.c:790
static uint32_t state
Definition: trasher.c:27
uint8_t *[2] p_field_select_table
Definition: mpegvideo.h:382
int16_t(* b_direct_mv_table)[2]
MV table (1MV per MB) direct mode b-frame encoding.
Definition: mpegvideo.h:379
Pan Scan area.
Definition: avcodec.h:788
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: avcodec.h:997
const uint8_t * c_dc_scale_table
qscale -> c_dc_scale table
Definition: mpegvideo.h:326
static void add_dequant_dct(MpegEncContext *s, DCTELEM *block, int i, uint8_t *dest, int line_size, int qscale)
Definition: mpegvideo.c:2161
int8_t * qscale_table
QP table.
Definition: avcodec.h:1139
uint8_t level
Definition: svq3.c:133
#define IS_INTRA16x16(a)
Definition: mpegvideo.h:107
qpel_mc_func(* qpel_avg)[16]
Definition: mpegvideo.h:200
int mv[2][4][2]
motion vectors for a macroblock first coordinate : 0 = forward 1 = backward second " : depend...
Definition: mpegvideo.h:400
int16_t(* b_forw_mv_table)[2]
MV table (1MV per MB) forward mode b-frame encoding.
Definition: mpegvideo.h:375
int b8_stride
2*mb_width+1 used for some 8x8 block arrays to allow simple addressing
Definition: mpegvideo.h:250
int noise_reduction
noise reduction strength
Definition: avcodec.h:1914
void(* clear_blocks)(DCTELEM *blocks)
Definition: dsputil.h:219
#define IS_ACPRED(a)
Definition: mpegvideo.h:124
int height
Definition: gxfenc.c:72
MpegEncContext.
Definition: mpegvideo.h:212
uint8_t run
Definition: get_bits.h:72
Picture * next_picture_ptr
pointer to the next picture (for bidir pred)
Definition: mpegvideo.h:317
#define MAX_RUN
Definition: rl.h:34
struct AVCodecContext * avctx
Definition: mpegvideo.h:214
static void draw_arrow(uint8_t *buf, int sx, int sy, int ex, int ey, int w, int h, int stride, int color)
Draw an arrow from (ex, ey) -> (sx, sy).
Definition: mpegvideo.c:1771
hardware decoding through VDA
Definition: pixfmt.h:153
discard all non reference
Definition: avcodec.h:533
int(* dct_error_sum)[64]
Definition: mpegvideo.h:468
int partitioned_frame
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:65
void ff_init_vlc_rl(RLTable *rl)
Definition: mpegvideo.c:1302
common internal api header.
int mb_stride
mb_width+1 used for some arrays to allow simple addressing of left & top MBs without sig11 ...
Definition: mpegvideo.h:249
void ff_MPV_common_defaults(MpegEncContext *s)
Set the given MpegEncContext to common defaults (same for encoding and decoding). ...
Definition: mpegvideo.c:716
#define IS_PCM(a)
Definition: mpegvideo.h:108
uint8_t * dest[3]
Definition: mpegvideo.h:436
#define FF_ALLOC_OR_GOTO(ctx, p, size, label)
Definition: internal.h:60
int last_pict_type
Definition: mpegvideo.h:351
int b4_stride
4*mb_width+1 used for some 4x4 block arrays to allow simple addressing
Definition: mpegvideo.h:251
int16_t * dc_val[3]
Picture last_picture
copy of the previous picture structure.
Definition: mpegvideo.h:296
uint8_t * obmc_scratchpad
Definition: mpegvideo.h:340
static int alloc_frame_buffer(MpegEncContext *s, Picture *pic)
Allocate a frame buffer.
Definition: mpegvideo.c:300
Picture * last_picture_ptr
pointer to the previous picture.
Definition: mpegvideo.h:316
Bi-dir predicted.
Definition: avutil.h:247
int index
Definition: parser.h:30
static void FUNCC() draw_edges(uint8_t *_buf, int _wrap, int width, int height, int w, int h, int sides)
const uint8_t * chroma_qscale_table
qscale -> chroma_qscale (h263)
Definition: mpegvideo.h:327
static const uint8_t color[]
Definition: log.c:52
uint32_t * map
map to avoid duplicate evaluations
Definition: mpegvideo.h:166
int ff_update_duplicate_context(MpegEncContext *dst, MpegEncContext *src)
Definition: mpegvideo.c:574
DSP utils.
int slices
Number of slices.
Definition: avcodec.h:2095
void * priv_data
Definition: avcodec.h:1382
int picture_structure
Definition: mpegvideo.h:625
VideoDSPContext vdsp
Definition: mpegvideo.h:363
void ff_wmv2_add_mb(MpegEncContext *s, DCTELEM block[6][64], uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr)
Definition: wmv2.c:59
void ff_MPV_decode_mb(MpegEncContext *s, DCTELEM block[12][64])
Definition: mpegvideo.c:2446
int top_field_first
If the content is interlaced, is top field displayed first.
Definition: avcodec.h:1239
#define IS_DIRECT(a)
Definition: mpegvideo.h:114
int len
void ff_MPV_common_end(MpegEncContext *s)
Definition: mpegvideo.c:1200
void ff_print_debug_info(MpegEncContext *s, AVFrame *pict)
Print debugging info for the given picture.
Definition: mpegvideo.c:1802
void ff_init_scantable(uint8_t *permutation, ScanTable *st, const uint8_t *src_scantable)
Definition: dsputil.c:122
struct AVCodecInternal * internal
Private context used for internal data.
Definition: avcodec.h:1390
int ff_MPV_lowest_referenced_row(MpegEncContext *s, int dir)
find the lowest MB row referenced in the MVs
Definition: mpegvideo.c:2109
ParseContext parse_context
Definition: mpegvideo.h:500
VLC_TYPE(* table)[2]
code, bits
Definition: get_bits.h:65
Picture next_picture
copy of the next picture structure.
Definition: mpegvideo.h:302
#define EDGE_WIDTH
Definition: dsputil.h:440
int key_frame
1 -> keyframe, 0-> not
Definition: avcodec.h:1058
static const uint8_t mpeg2_dc_scale_table1[128]
Definition: mpegvideo.c:85
int linesize
line size, in bytes, may be different from width
Definition: mpegvideo.h:254
uint8_t * mbskip_table
mbskip_table[mb]>=1 if MB didn't change stride= mb_width = (width+15)>>4
Definition: avcodec.h:1158
int16_t level
Definition: get_bits.h:70
#define IS_16X16(a)
Definition: mpegvideo.h:116
Picture ** reordered_input_picture
pointer to the next pictures in codedorder for encoding
Definition: mpegvideo.h:258
int flags2
CODEC_FLAG2_*.
Definition: avcodec.h:1441
static int init_er(MpegEncContext *s)
Definition: mpegvideo.c:750
static const uint8_t mpeg2_dc_scale_table2[128]
Definition: mpegvideo.c:97
int chroma_qscale
chroma QP
Definition: mpegvideo.h:344
struct AVFrame f
Definition: mpegvideo.h:96
static void add_dct(MpegEncContext *s, DCTELEM *block, int i, uint8_t *dest, int line_size)
Definition: mpegvideo.c:2153
static int init_duplicate_context(MpegEncContext *s, MpegEncContext *base)
Definition: mpegvideo.c:482
#define REBASE_PICTURE(pic, new_ctx, old_ctx)
Definition: h264.c:1355
void ff_er_frame_start(ERContext *s)
int flags
AVCodecContext.flags (HQ, MV4, ...)
Definition: mpegvideo.h:231
uint16_t intra_matrix[64]
matrix transmitted in the bitstream
Definition: mpegvideo.h:441
int workaround_bugs
workaround bugs in encoders which cannot be detected automatically
Definition: mpegvideo.h:237
ScanTable inter_scantable
if inter == intra then intra should be used to reduce tha cache usage
Definition: mpegvideo.h:266
static void put_dct(MpegEncContext *s, DCTELEM *block, int i, uint8_t *dest, int line_size, int qscale)
Definition: mpegvideo.c:2145
uint8_t * temp
Definition: mpegvideo.h:164
int avcodec_default_get_buffer(AVCodecContext *s, AVFrame *pic)
Definition: utils.c:451
static void dct_unquantize_mpeg1_inter_c(MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.c:2634
void ff_thread_report_progress(AVFrame *f, int n, int field)
Notify later decoding threads when part of their reference picture is ready.
Definition: pthread.c:666
int ff_thread_get_buffer(AVCodecContext *avctx, AVFrame *f)
Wrapper around get_buffer() for frame-multithreaded codecs.
Definition: pthread.c:921
static int first_field(int fd)
Definition: v4l2.c:207
int debug_mv
debug
Definition: avcodec.h:2592
int ff_find_unused_picture(MpegEncContext *s, int shared)
Definition: mpegvideo.c:1390
#define MV_TYPE_8X8
4 vectors (h263, mpeg4 4MV)
Definition: mpegvideo.h:391
int16_t(* b_direct_mv_table_base)[2]
Definition: mpegvideo.h:371
int b_code
backward MV resolution for B Frames (mpeg4)
Definition: mpegvideo.h:365
#define CONFIG_MPEG_XVMC_DECODER
Definition: config.h:442
uint8_t ** extended_data
pointers to the data planes/channels.
Definition: avcodec.h:1028
void(* dct_unquantize_h263_intra)(struct MpegEncContext *s, DCTELEM *block, int n, int qscale)
Definition: mpegvideo.h:678
float * bits_tab
Definition: mpegvideo.h:698
#define restrict
Definition: config.h:8
int dct_count[2]
Definition: mpegvideo.h:469
uint8_t * mbskip_table
int uvlinesize
line size, for chroma in bytes, may be different from width
Definition: mpegvideo.h:255
void ff_MPV_common_init_axp(MpegEncContext *s)
AVPixelFormat
Pixel format.
Definition: pixfmt.h:63
void * av_mallocz(size_t size)
Allocate a block of size bytes with alignment suitable for all memory accesses (including vectors if ...
Definition: mem.c:158
void ff_thread_release_buffer(AVCodecContext *avctx, AVFrame *f)
Wrapper around release_buffer() frame-for multithreaded codecs.
Definition: pthread.c:979
for(j=16;j >0;--j)
#define FF_ALLOCZ_OR_GOTO(ctx, p, size, label)
Definition: internal.h:69
Predicted.
Definition: avutil.h:246
DSPContext.
Definition: dsputil.h:194
void ff_MPV_decode_defaults(MpegEncContext *s)
Set the given MpegEncContext to defaults for decoding.
Definition: mpegvideo.c:745
uint16_t pb_time
time distance between the last b and p,s,i frame
Definition: mpegvideo.h:527
if(!(ptr_align%ac->ptr_align)&&samples_align >=aligned_len)