FFmpeg 4.1.11
ffv1dec.c
1 /*
2  * FFV1 decoder
3  *
4  * Copyright (c) 2003-2013 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /**
24  * @file
25  * FF Video Codec 1 (a lossless codec) decoder
26  */
27 
28 #include "libavutil/avassert.h"
29 #include "libavutil/crc.h"
30 #include "libavutil/opt.h"
31 #include "libavutil/imgutils.h"
32 #include "libavutil/pixdesc.h"
33 #include "libavutil/timer.h"
34 #include "avcodec.h"
35 #include "internal.h"
36 #include "get_bits.h"
37 #include "rangecoder.h"
38 #include "golomb.h"
39 #include "mathops.h"
40 #include "ffv1.h"
41 
42 static av_flatten int get_symbol_inline(RangeCoder *c, uint8_t *state,
43  int is_signed)
44 {
45  if (get_rac(c, state + 0))
46  return 0;
47  else {
48  int i, e;
49  unsigned a;
50  e = 0;
51  while (get_rac(c, state + 1 + FFMIN(e, 9))) { // 1..10
52  e++;
53  if (e > 31)
54  return AVERROR_INVALIDDATA;
55  }
56 
57  a = 1;
58  for (i = e - 1; i >= 0; i--)
59  a += a + get_rac(c, state + 22 + FFMIN(i, 9)); // 22..31
60 
61  e = -(is_signed && get_rac(c, state + 11 + FFMIN(e, 10))); // 11..21
62  return (a ^ e) - e;
63  }
64 }
65 
66 static av_noinline int get_symbol(RangeCoder *c, uint8_t *state, int is_signed)
67 {
68  return get_symbol_inline(c, state, is_signed);
69 }
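/*
 * get_symbol_inline()/get_symbol() read one range-coded integer: state[0]
 * codes "value is zero", states 1..10 code a unary exponent e, states 22..31
 * code the e mantissa bits, and states 11..21 code the sign when is_signed
 * is set.  The final (a ^ e) - e negates the magnitude iff the sign bit was 1.
 */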
70 
71 static inline int get_vlc_symbol(GetBitContext *gb, VlcState *const state,
72  int bits)
73 {
74  int k, i, v, ret;
75 
76  i = state->count;
77  k = 0;
78  while (i < state->error_sum) { // FIXME: optimize
79  k++;
80  i += i;
81  }
82 
83  v = get_sr_golomb(gb, k, 12, bits);
84  ff_dlog(NULL, "v:%d bias:%d error:%d drift:%d count:%d k:%d",
85  v, state->bias, state->error_sum, state->drift, state->count, k);
86 
87  v ^= ((2 * state->drift + state->count) >> 31);
88 
89  ret = fold(v + state->bias, bits);
90 
91  update_vlc_state(state, v);
92 
93  return ret;
94 }
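/*
 * get_vlc_symbol() is the Golomb-Rice counterpart: k is the smallest value
 * with (count << k) >= error_sum, the decoded residual is bit-inverted when
 * 2*drift + count is negative, and fold() wraps v + bias back into the
 * signed range of 'bits' before update_vlc_state() adapts bias/drift/count.
 */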
95 
96 static int is_input_end(FFV1Context *s)
97 {
98  if (s->ac != AC_GOLOMB_RICE) {
99  RangeCoder *const c = &s->c;
100  if (c->overread > MAX_OVERREAD)
101  return AVERROR_INVALIDDATA;
102  } else {
103  if (get_bits_left(&s->gb) < 1)
104  return AVERROR_INVALIDDATA;
105  }
106  return 0;
107 }
108 
109 #define TYPE int16_t
110 #define RENAME(name) name
111 #include "ffv1dec_template.c"
112 #undef TYPE
113 #undef RENAME
114 
115 #define TYPE int32_t
116 #define RENAME(name) name ## 32
117 #include "ffv1dec_template.c"
118 
119 static int decode_plane(FFV1Context *s, uint8_t *src,
120  int w, int h, int stride, int plane_index,
121  int pixel_stride)
122 {
123  int x, y;
124  int16_t *sample[2];
125  sample[0] = s->sample_buffer + 3;
126  sample[1] = s->sample_buffer + w + 6 + 3;
127 
128  s->run_index = 0;
129 
130  memset(s->sample_buffer, 0, 2 * (w + 6) * sizeof(*s->sample_buffer));
131 
132  for (y = 0; y < h; y++) {
133  int16_t *temp = sample[0]; // FIXME: try a normal buffer
134 
135  sample[0] = sample[1];
136  sample[1] = temp;
137 
138  sample[1][-1] = sample[0][0];
139  sample[0][w] = sample[0][w - 1];
140 
141 // { START_TIMER
142  if (s->avctx->bits_per_raw_sample <= 8) {
143  int ret = decode_line(s, w, sample, plane_index, 8);
144  if (ret < 0)
145  return ret;
146  for (x = 0; x < w; x++)
147  src[x*pixel_stride + stride * y] = sample[1][x];
148  } else {
149  int ret = decode_line(s, w, sample, plane_index, s->avctx->bits_per_raw_sample);
150  if (ret < 0)
151  return ret;
152  if (s->packed_at_lsb) {
153  for (x = 0; x < w; x++) {
154  ((uint16_t*)(src + stride*y))[x*pixel_stride] = sample[1][x];
155  }
156  } else {
157  for (x = 0; x < w; x++) {
158  ((uint16_t*)(src + stride*y))[x*pixel_stride] = sample[1][x] << (16 - s->avctx->bits_per_raw_sample) | ((uint16_t **)sample)[1][x] >> (2 * s->avctx->bits_per_raw_sample - 16);
159  }
160  }
161  }
162 // STOP_TIMER("decode-line") }
163  }
164  return 0;
165 }
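/*
 * decode_plane() keeps just two rows of samples: sample[0] is the previous
 * line, sample[1] the current one, swapped every iteration, each padded by
 * 3 samples on the left and right so the predictor may read past the edges.
 * pixel_stride is 1 for planar output and 2 when two components share one
 * packed plane (the gray+alpha path in decode_slice()).
 */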
166 
167 static int decode_slice_header(FFV1Context *f, FFV1Context *fs)
168 {
169  RangeCoder *c = &fs->c;
170  uint8_t state[CONTEXT_SIZE];
171  unsigned ps, i, context_count;
172  int sx, sy, sw, sh;
173 
174  memset(state, 128, sizeof(state));
175  sx = get_symbol(c, state, 0);
176  sy = get_symbol(c, state, 0);
177  sw = get_symbol(c, state, 0) + 1U;
178  sh = get_symbol(c, state, 0) + 1U;
179 
180  av_assert0(f->version > 2);
181 
182 
183  if (sx < 0 || sy < 0 || sw <= 0 || sh <= 0)
184  return AVERROR_INVALIDDATA;
185  if (sx > f->num_h_slices - sw || sy > f->num_v_slices - sh)
186  return AVERROR_INVALIDDATA;
187 
188  fs->slice_x = sx * (int64_t)f->width / f->num_h_slices;
189  fs->slice_y = sy * (int64_t)f->height / f->num_v_slices;
190  fs->slice_width = (sx + sw) * (int64_t)f->width / f->num_h_slices - fs->slice_x;
191  fs->slice_height = (sy + sh) * (int64_t)f->height / f->num_v_slices - fs->slice_y;
192 
193  av_assert0((unsigned)fs->slice_width <= f->width &&
194  (unsigned)fs->slice_height <= f->height);
195  av_assert0 ( (unsigned)fs->slice_x + (uint64_t)fs->slice_width <= f->width
196  && (unsigned)fs->slice_y + (uint64_t)fs->slice_height <= f->height);
197 
198  if (fs->ac == AC_GOLOMB_RICE && fs->slice_width >= (1<<23))
199  return AVERROR_INVALIDDATA;
200 
201  for (i = 0; i < f->plane_count; i++) {
202  PlaneContext * const p = &fs->plane[i];
203  int idx = get_symbol(c, state, 0);
204  if (idx >= (unsigned)f->quant_table_count) {
205  av_log(f->avctx, AV_LOG_ERROR, "quant_table_index out of range\n");
206  return -1;
207  }
208  p->quant_table_index = idx;
209  memcpy(p->quant_table, f->quant_tables[idx], sizeof(p->quant_table));
210  context_count = f->context_count[idx];
211 
212  if (p->context_count < context_count) {
213  av_freep(&p->state);
214  av_freep(&p->vlc_state);
215  }
216  p->context_count = context_count;
217  }
218 
219  ps = get_symbol(c, state, 0);
220  if (ps == 1) {
221  f->cur->interlaced_frame = 1;
222  f->cur->top_field_first = 1;
223  } else if (ps == 2) {
224  f->cur->interlaced_frame = 1;
225  f->cur->top_field_first = 0;
226  } else if (ps == 3) {
227  f->cur->interlaced_frame = 0;
228  }
229  f->cur->sample_aspect_ratio.num = get_symbol(c, state, 0);
230  f->cur->sample_aspect_ratio.den = get_symbol(c, state, 0);
231 
232  if (av_image_check_sar(f->width, f->height,
233  f->cur->sample_aspect_ratio) < 0) {
234  av_log(f->avctx, AV_LOG_WARNING, "ignoring invalid SAR: %u/%u\n",
235  f->cur->sample_aspect_ratio.num,
236  f->cur->sample_aspect_ratio.den);
237  f->cur->sample_aspect_ratio = (AVRational){ 0, 1 };
238  }
239 
240  if (fs->version > 3) {
241  fs->slice_reset_contexts = get_rac(c, state);
242  fs->slice_coding_mode = get_symbol(c, state, 0);
243  if (fs->slice_coding_mode != 1) {
244  fs->slice_rct_by_coef = get_symbol(c, state, 0);
245  fs->slice_rct_ry_coef = get_symbol(c, state, 0);
246  if ((uint64_t)fs->slice_rct_by_coef + (uint64_t)fs->slice_rct_ry_coef > 4) {
247  av_log(f->avctx, AV_LOG_ERROR, "slice_rct_y_coef out of range\n");
248  return AVERROR_INVALIDDATA;
249  }
250  }
251  }
252 
253  return 0;
254 }
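/*
 * Slice geometry is coded in slice-grid units, not pixels: sx/sy/sw/sh index
 * a num_h_slices x num_v_slices grid and the pixel rectangle is rebuilt with
 * the same integer divisions the encoder used.  E.g. with width 1920 and
 * num_h_slices 3, a slice with sx = 1, sw = 1 gets slice_x = 1*1920/3 = 640
 * and slice_width = 2*1920/3 - 640 = 640.
 */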
255 
256 static int decode_slice(AVCodecContext *c, void *arg)
257 {
258  FFV1Context *fs = *(void **)arg;
259  FFV1Context *f = fs->avctx->priv_data;
260  int width, height, x, y, ret;
261  const int ps = av_pix_fmt_desc_get(c->pix_fmt)->comp[0].step;
262  AVFrame * const p = f->cur;
263  int i, si;
264 
265  for( si=0; fs != f->slice_context[si]; si ++)
266  ;
267 
268  if(f->fsrc && !p->key_frame)
269  ff_thread_await_progress(&f->last_picture, si, 0);
270 
271  if(f->fsrc && !p->key_frame) {
272  FFV1Context *fssrc = f->fsrc->slice_context[si];
273  FFV1Context *fsdst = f->slice_context[si];
274  av_assert1(fsdst->plane_count == fssrc->plane_count);
275  av_assert1(fsdst == fs);
276 
277  if (!p->key_frame)
278  fsdst->slice_damaged |= fssrc->slice_damaged;
279 
280  for (i = 0; i < f->plane_count; i++) {
281  PlaneContext *psrc = &fssrc->plane[i];
282  PlaneContext *pdst = &fsdst->plane[i];
283 
284  av_free(pdst->state);
285  av_free(pdst->vlc_state);
286  memcpy(pdst, psrc, sizeof(*pdst));
287  pdst->state = NULL;
288  pdst->vlc_state = NULL;
289 
290  if (fssrc->ac) {
291  pdst->state = av_malloc_array(CONTEXT_SIZE, psrc->context_count);
292  memcpy(pdst->state, psrc->state, CONTEXT_SIZE * psrc->context_count);
293  } else {
294  pdst->vlc_state = av_malloc_array(sizeof(*pdst->vlc_state), psrc->context_count);
295  memcpy(pdst->vlc_state, psrc->vlc_state, sizeof(*pdst->vlc_state) * psrc->context_count);
296  }
297  }
298  }
299 
300  fs->slice_rct_by_coef = 1;
301  fs->slice_rct_ry_coef = 1;
302 
303  if (f->version > 2) {
304  if (ff_ffv1_init_slice_state(f, fs) < 0)
305  return AVERROR(ENOMEM);
306  if (decode_slice_header(f, fs) < 0) {
307  fs->slice_x = fs->slice_y = fs->slice_height = fs->slice_width = 0;
308  fs->slice_damaged = 1;
309  return AVERROR_INVALIDDATA;
310  }
311  }
312  if ((ret = ff_ffv1_init_slice_state(f, fs)) < 0)
313  return ret;
314  if (f->cur->key_frame || fs->slice_reset_contexts) {
315  ff_ffv1_clear_slice_state(f, fs);
316  } else if (fs->slice_damaged) {
317  return AVERROR_INVALIDDATA;
318  }
319 
320  width = fs->slice_width;
321  height = fs->slice_height;
322  x = fs->slice_x;
323  y = fs->slice_y;
324 
325  if (fs->ac == AC_GOLOMB_RICE) {
326  if (f->version == 3 && f->micro_version > 1 || f->version > 3)
327  get_rac(&fs->c, (uint8_t[]) { 129 });
328  fs->ac_byte_count = f->version > 2 || (!x && !y) ? fs->c.bytestream - fs->c.bytestream_start - 1 : 0;
329  init_get_bits(&fs->gb,
330  fs->c.bytestream_start + fs->ac_byte_count,
331  (fs->c.bytestream_end - fs->c.bytestream_start - fs->ac_byte_count) * 8);
332  }
333 
334  av_assert1(width && height);
335  if (f->colorspace == 0 && (f->chroma_planes || !fs->transparency)) {
336  const int chroma_width = AV_CEIL_RSHIFT(width, f->chroma_h_shift);
337  const int chroma_height = AV_CEIL_RSHIFT(height, f->chroma_v_shift);
338  const int cx = x >> f->chroma_h_shift;
339  const int cy = y >> f->chroma_v_shift;
340  decode_plane(fs, p->data[0] + ps*x + y*p->linesize[0], width, height, p->linesize[0], 0, 1);
341 
342  if (f->chroma_planes) {
343  decode_plane(fs, p->data[1] + ps*cx+cy*p->linesize[1], chroma_width, chroma_height, p->linesize[1], 1, 1);
344  decode_plane(fs, p->data[2] + ps*cx+cy*p->linesize[2], chroma_width, chroma_height, p->linesize[2], 1, 1);
345  }
346  if (fs->transparency)
347  decode_plane(fs, p->data[3] + ps*x + y*p->linesize[3], width, height, p->linesize[3], (f->version >= 4 && !f->chroma_planes) ? 1 : 2, 1);
348  } else if (f->colorspace == 0) {
349  decode_plane(fs, p->data[0] + ps*x + y*p->linesize[0] , width, height, p->linesize[0], 0, 2);
350  decode_plane(fs, p->data[0] + ps*x + y*p->linesize[0] + 1, width, height, p->linesize[0], 1, 2);
351  } else if (f->use32bit) {
352  uint8_t *planes[4] = { p->data[0] + ps * x + y * p->linesize[0],
353  p->data[1] + ps * x + y * p->linesize[1],
354  p->data[2] + ps * x + y * p->linesize[2],
355  p->data[3] + ps * x + y * p->linesize[3] };
356  decode_rgb_frame32(fs, planes, width, height, p->linesize);
357  } else {
358  uint8_t *planes[4] = { p->data[0] + ps * x + y * p->linesize[0],
359  p->data[1] + ps * x + y * p->linesize[1],
360  p->data[2] + ps * x + y * p->linesize[2],
361  p->data[3] + ps * x + y * p->linesize[3] };
362  decode_rgb_frame(fs, planes, width, height, p->linesize);
363  }
364  if (fs->ac != AC_GOLOMB_RICE && f->version > 2) {
365  int v;
366  get_rac(&fs->c, (uint8_t[]) { 129 });
367  v = fs->c.bytestream_end - fs->c.bytestream - 2 - 5*f->ec;
368  if (v) {
369  av_log(f->avctx, AV_LOG_ERROR, "bytestream end mismatching by %d\n", v);
370  fs->slice_damaged = 1;
371  }
372  }
373 
374  emms_c();
375 
376  ff_thread_report_progress(&f->picture, si, 0);
377 
378  return 0;
379 }
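/*
 * decode_slice() handles one independent slice: parse its header (version
 * > 2), reset or inherit the per-plane context state, switch to the
 * Golomb-Rice bit reader when fs->ac says so, decode the planes (luma +
 * chroma, gray + alpha, or RGB via the ffv1dec_template functions), then
 * check that the range coder stopped exactly at the slice trailer; any
 * mismatch only marks the slice as damaged instead of failing the frame.
 */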
380 
381 static int read_quant_table(RangeCoder *c, int16_t *quant_table, int scale)
382 {
383  int v;
384  int i = 0;
385  uint8_t state[CONTEXT_SIZE];
386 
387  memset(state, 128, sizeof(state));
388 
389  for (v = 0; i < 128; v++) {
390  unsigned len = get_symbol(c, state, 0) + 1U;
391 
392  if (len > 128 - i || !len)
393  return AVERROR_INVALIDDATA;
394 
395  while (len--) {
396  quant_table[i] = scale * v;
397  i++;
398  }
399  }
400 
401  for (i = 1; i < 128; i++)
402  quant_table[256 - i] = -quant_table[i];
403  quant_table[128] = -quant_table[127];
404 
405  return 2 * v - 1;
406 }
407 
408 static int read_quant_tables(RangeCoder *c,
409  int16_t quant_table[MAX_CONTEXT_INPUTS][256])
410 {
411  int i;
412  int context_count = 1;
413 
414  for (i = 0; i < 5; i++) {
415  int ret = read_quant_table(c, quant_table[i], context_count);
416  if (ret < 0)
417  return ret;
418  context_count *= ret;
419  if (context_count > 32768U) {
420  return AVERROR_INVALIDDATA;
421  }
422  }
423  return (context_count + 1) / 2;
424 }
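/*
 * Each of the 5 quantization tables is run-length coded: runs of cells that
 * share the value v, with the negative half mirrored from the positive one,
 * so read_quant_table() returns 2*v - 1 distinct levels.  The per-table level
 * counts multiply into the raw context count, which read_quant_tables()
 * halves with (context_count + 1) / 2 since sign-mirrored contexts share
 * their model state.
 */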
425 
426 static int read_extra_header(FFV1Context *f)
427 {
428  RangeCoder *const c = &f->c;
429  uint8_t state[CONTEXT_SIZE];
430  int i, j, k, ret;
431  uint8_t state2[32][CONTEXT_SIZE];
432  unsigned crc = 0;
433 
434  memset(state2, 128, sizeof(state2));
435  memset(state, 128, sizeof(state));
436 
437  ff_init_range_decoder(c, f->avctx->extradata, f->avctx->extradata_size);
438  ff_build_rac_states(c, 0.05 * (1LL << 32), 256 - 8);
439 
440  f->version = get_symbol(c, state, 0);
441  if (f->version < 2) {
442  av_log(f->avctx, AV_LOG_ERROR, "Invalid version in global header\n");
443  return AVERROR_INVALIDDATA;
444  }
445  if (f->version > 2) {
446  c->bytestream_end -= 4;
447  f->micro_version = get_symbol(c, state, 0);
448  if (f->micro_version < 0)
449  return AVERROR_INVALIDDATA;
450  }
451  f->ac = get_symbol(c, state, 0);
452 
453  if (f->ac == AC_RANGE_CUSTOM_TAB) {
454  for (i = 1; i < 256; i++)
455  f->state_transition[i] = get_symbol(c, state, 1) + c->one_state[i];
456  }
457 
458  f->colorspace = get_symbol(c, state, 0); //YUV cs type
459  f->avctx->bits_per_raw_sample = get_symbol(c, state, 0);
460  f->chroma_planes = get_rac(c, state);
461  f->chroma_h_shift = get_symbol(c, state, 0);
462  f->chroma_v_shift = get_symbol(c, state, 0);
463  f->transparency = get_rac(c, state);
464  f->plane_count = 1 + (f->chroma_planes || f->version<4) + f->transparency;
465  f->num_h_slices = 1 + get_symbol(c, state, 0);
466  f->num_v_slices = 1 + get_symbol(c, state, 0);
467 
468  if (f->chroma_h_shift > 4U || f->chroma_v_shift > 4U) {
469  av_log(f->avctx, AV_LOG_ERROR, "chroma shift parameters %d %d are invalid\n",
470  f->chroma_h_shift, f->chroma_v_shift);
471  return AVERROR_INVALIDDATA;
472  }
473 
474  if (f->num_h_slices > (unsigned)f->width || !f->num_h_slices ||
475  f->num_v_slices > (unsigned)f->height || !f->num_v_slices
476  ) {
477  av_log(f->avctx, AV_LOG_ERROR, "slice count invalid\n");
478  return AVERROR_INVALIDDATA;
479  }
480 
481  if (f->num_h_slices > MAX_SLICES / f->num_v_slices) {
482  av_log(f->avctx, AV_LOG_ERROR, "slice count unsupported\n");
483  return AVERROR_PATCHWELCOME;
484  }
485 
486  f->quant_table_count = get_symbol(c, state, 0);
487  if (f->quant_table_count > (unsigned)MAX_QUANT_TABLES || !f->quant_table_count) {
488  av_log(f->avctx, AV_LOG_ERROR, "quant table count %d is invalid\n", f->quant_table_count);
489  f->quant_table_count = 0;
490  return AVERROR_INVALIDDATA;
491  }
492 
493  for (i = 0; i < f->quant_table_count; i++) {
494  f->context_count[i] = read_quant_tables(c, f->quant_tables[i]);
495  if (f->context_count[i] < 0) {
496  av_log(f->avctx, AV_LOG_ERROR, "read_quant_table error\n");
497  return AVERROR_INVALIDDATA;
498  }
499  }
500  if ((ret = ff_ffv1_allocate_initial_states(f)) < 0)
501  return ret;
502 
503  for (i = 0; i < f->quant_table_count; i++)
504  if (get_rac(c, state)) {
505  for (j = 0; j < f->context_count[i]; j++)
506  for (k = 0; k < CONTEXT_SIZE; k++) {
507  int pred = j ? f->initial_states[i][j - 1][k] : 128;
508  f->initial_states[i][j][k] =
509  (pred + get_symbol(c, state2[k], 1)) & 0xFF;
510  }
511  }
512 
513  if (f->version > 2) {
514  f->ec = get_symbol(c, state, 0);
515  if (f->micro_version > 2)
516  f->intra = get_symbol(c, state, 0);
517  }
518 
519  if (f->version > 2) {
520  unsigned v;
521  v = av_crc(av_crc_get_table(AV_CRC_32_IEEE), 0,
522  f->avctx->extradata, f->avctx->extradata_size);
523  if (v || f->avctx->extradata_size < 4) {
524  av_log(f->avctx, AV_LOG_ERROR, "CRC mismatch %X!\n", v);
525  return AVERROR_INVALIDDATA;
526  }
527  crc = AV_RB32(f->avctx->extradata + f->avctx->extradata_size - 4);
528  }
529 
530  if (f->avctx->debug & FF_DEBUG_PICT_INFO)
531  av_log(f->avctx, AV_LOG_DEBUG,
532  "global: ver:%d.%d, coder:%d, colorspace: %d bpr:%d chroma:%d(%d:%d), alpha:%d slices:%dx%d qtabs:%d ec:%d intra:%d CRC:0x%08X\n",
533  f->version, f->micro_version,
534  f->ac,
535  f->colorspace,
536  f->avctx->bits_per_raw_sample,
537  f->chroma_planes, f->chroma_h_shift, f->chroma_v_shift,
538  f->transparency,
539  f->num_h_slices, f->num_v_slices,
540  f->quant_table_count,
541  f->ec,
542  f->intra,
543  crc
544  );
545  return 0;
546 }
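/*
 * read_extra_header() parses the version >= 2 global header carried in
 * extradata: version/micro_version, coder type (plus a custom state
 * transition table for AC_RANGE_CUSTOM_TAB), colorspace, bit depth, chroma
 * subsampling, alpha, the slice grid, the quantization tables, optional
 * initial context states and, for version > 2, the error-correction flag
 * and a CRC over the whole extradata block.
 */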
547 
548 static int read_header(FFV1Context *f)
549 {
550  uint8_t state[CONTEXT_SIZE];
551  int i, j, context_count = -1; //-1 to avoid warning
552  RangeCoder *const c = &f->slice_context[0]->c;
553 
554  memset(state, 128, sizeof(state));
555 
556  if (f->version < 2) {
557  int chroma_planes, chroma_h_shift, chroma_v_shift, transparency, colorspace, bits_per_raw_sample;
558  unsigned v= get_symbol(c, state, 0);
559  if (v >= 2) {
560  av_log(f->avctx, AV_LOG_ERROR, "invalid version %d in ver01 header\n", v);
561  return AVERROR_INVALIDDATA;
562  }
563  f->version = v;
564  f->ac = get_symbol(c, state, 0);
565 
566  if (f->ac == AC_RANGE_CUSTOM_TAB) {
567  for (i = 1; i < 256; i++) {
568  int st = get_symbol(c, state, 1) + c->one_state[i];
569  if (st < 1 || st > 255) {
570  av_log(f->avctx, AV_LOG_ERROR, "invalid state transition %d\n", st);
571  return AVERROR_INVALIDDATA;
572  }
573  f->state_transition[i] = st;
574  }
575  }
576 
577  colorspace = get_symbol(c, state, 0); //YUV cs type
578  bits_per_raw_sample = f->version > 0 ? get_symbol(c, state, 0) : f->avctx->bits_per_raw_sample;
579  chroma_planes = get_rac(c, state);
580  chroma_h_shift = get_symbol(c, state, 0);
581  chroma_v_shift = get_symbol(c, state, 0);
582  transparency = get_rac(c, state);
583  if (colorspace == 0 && f->avctx->skip_alpha)
584  transparency = 0;
585 
586  if (f->plane_count) {
587  if (colorspace != f->colorspace ||
588  bits_per_raw_sample != f->avctx->bits_per_raw_sample ||
589  chroma_planes != f->chroma_planes ||
590  chroma_h_shift != f->chroma_h_shift ||
591  chroma_v_shift != f->chroma_v_shift ||
592  transparency != f->transparency) {
593  av_log(f->avctx, AV_LOG_ERROR, "Invalid change of global parameters\n");
594  return AVERROR_INVALIDDATA;
595  }
596  }
597 
598  if (chroma_h_shift > 4U || chroma_v_shift > 4U) {
599  av_log(f->avctx, AV_LOG_ERROR, "chroma shift parameters %d %d are invalid\n",
600  chroma_h_shift, chroma_v_shift);
601  return AVERROR_INVALIDDATA;
602  }
603 
604  f->colorspace = colorspace;
605  f->avctx->bits_per_raw_sample = bits_per_raw_sample;
606  f->chroma_planes = chroma_planes;
607  f->chroma_h_shift = chroma_h_shift;
608  f->chroma_v_shift = chroma_v_shift;
609  f->transparency = transparency;
610 
611  f->plane_count = 2 + f->transparency;
612  }
613 
614  if (f->colorspace == 0) {
615  if (!f->transparency && !f->chroma_planes) {
616  if (f->avctx->bits_per_raw_sample <= 8)
617  f->avctx->pix_fmt = AV_PIX_FMT_GRAY8;
618  else if (f->avctx->bits_per_raw_sample == 9) {
619  f->packed_at_lsb = 1;
620  f->avctx->pix_fmt = AV_PIX_FMT_GRAY9;
621  } else if (f->avctx->bits_per_raw_sample == 10) {
622  f->packed_at_lsb = 1;
623  f->avctx->pix_fmt = AV_PIX_FMT_GRAY10;
624  } else if (f->avctx->bits_per_raw_sample == 12) {
625  f->packed_at_lsb = 1;
626  f->avctx->pix_fmt = AV_PIX_FMT_GRAY12;
627  } else if (f->avctx->bits_per_raw_sample == 16) {
628  f->packed_at_lsb = 1;
629  f->avctx->pix_fmt = AV_PIX_FMT_GRAY16;
630  } else if (f->avctx->bits_per_raw_sample < 16) {
631  f->avctx->pix_fmt = AV_PIX_FMT_GRAY16;
632  } else
633  return AVERROR(ENOSYS);
634  } else if (f->transparency && !f->chroma_planes) {
635  if (f->avctx->bits_per_raw_sample <= 8)
636  f->avctx->pix_fmt = AV_PIX_FMT_YA8;
637  else
638  return AVERROR(ENOSYS);
639  } else if (f->avctx->bits_per_raw_sample<=8 && !f->transparency) {
640  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
641  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P; break;
642  case 0x01: f->avctx->pix_fmt = AV_PIX_FMT_YUV440P; break;
643  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P; break;
644  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P; break;
645  case 0x20: f->avctx->pix_fmt = AV_PIX_FMT_YUV411P; break;
646  case 0x22: f->avctx->pix_fmt = AV_PIX_FMT_YUV410P; break;
647  }
648  } else if (f->avctx->bits_per_raw_sample <= 8 && f->transparency) {
649  switch(16*f->chroma_h_shift + f->chroma_v_shift) {
650  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P; break;
651  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P; break;
652  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P; break;
653  }
654  } else if (f->avctx->bits_per_raw_sample == 9 && !f->transparency) {
655  f->packed_at_lsb = 1;
656  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
657  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P9; break;
658  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P9; break;
659  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P9; break;
660  }
661  } else if (f->avctx->bits_per_raw_sample == 9 && f->transparency) {
662  f->packed_at_lsb = 1;
663  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
664  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P9; break;
665  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P9; break;
666  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P9; break;
667  }
668  } else if (f->avctx->bits_per_raw_sample == 10 && !f->transparency) {
669  f->packed_at_lsb = 1;
670  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
671  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P10; break;
672  case 0x01: f->avctx->pix_fmt = AV_PIX_FMT_YUV440P10; break;
673  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P10; break;
674  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P10; break;
675  }
676  } else if (f->avctx->bits_per_raw_sample == 10 && f->transparency) {
677  f->packed_at_lsb = 1;
678  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
679  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P10; break;
680  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P10; break;
681  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P10; break;
682  }
683  } else if (f->avctx->bits_per_raw_sample == 12 && !f->transparency) {
684  f->packed_at_lsb = 1;
685  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
686  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P12; break;
687  case 0x01: f->avctx->pix_fmt = AV_PIX_FMT_YUV440P12; break;
688  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P12; break;
689  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P12; break;
690  }
691  } else if (f->avctx->bits_per_raw_sample == 14 && !f->transparency) {
692  f->packed_at_lsb = 1;
693  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
694  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P14; break;
695  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P14; break;
696  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P14; break;
697  }
698  } else if (f->avctx->bits_per_raw_sample == 16 && !f->transparency){
699  f->packed_at_lsb = 1;
700  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
701  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUV444P16; break;
702  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUV422P16; break;
703  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUV420P16; break;
704  }
705  } else if (f->avctx->bits_per_raw_sample == 16 && f->transparency){
706  f->packed_at_lsb = 1;
707  switch(16 * f->chroma_h_shift + f->chroma_v_shift) {
708  case 0x00: f->avctx->pix_fmt = AV_PIX_FMT_YUVA444P16; break;
709  case 0x10: f->avctx->pix_fmt = AV_PIX_FMT_YUVA422P16; break;
710  case 0x11: f->avctx->pix_fmt = AV_PIX_FMT_YUVA420P16; break;
711  }
712  }
713  } else if (f->colorspace == 1) {
714  if (f->chroma_h_shift || f->chroma_v_shift) {
715  av_log(f->avctx, AV_LOG_ERROR,
716  "chroma subsampling not supported in this colorspace\n");
717  return AVERROR(ENOSYS);
718  }
719  if ( f->avctx->bits_per_raw_sample <= 8 && !f->transparency)
720  f->avctx->pix_fmt = AV_PIX_FMT_0RGB32;
721  else if (f->avctx->bits_per_raw_sample <= 8 && f->transparency)
722  f->avctx->pix_fmt = AV_PIX_FMT_RGB32;
723  else if (f->avctx->bits_per_raw_sample == 9 && !f->transparency)
724  f->avctx->pix_fmt = AV_PIX_FMT_GBRP9;
725  else if (f->avctx->bits_per_raw_sample == 10 && !f->transparency)
726  f->avctx->pix_fmt = AV_PIX_FMT_GBRP10;
727  else if (f->avctx->bits_per_raw_sample == 10 && f->transparency)
728  f->avctx->pix_fmt = AV_PIX_FMT_GBRAP10;
729  else if (f->avctx->bits_per_raw_sample == 12 && !f->transparency)
730  f->avctx->pix_fmt = AV_PIX_FMT_GBRP12;
731  else if (f->avctx->bits_per_raw_sample == 12 && f->transparency)
732  f->avctx->pix_fmt = AV_PIX_FMT_GBRAP12;
733  else if (f->avctx->bits_per_raw_sample == 14 && !f->transparency)
734  f->avctx->pix_fmt = AV_PIX_FMT_GBRP14;
735  else if (f->avctx->bits_per_raw_sample == 16 && !f->transparency) {
736  f->avctx->pix_fmt = AV_PIX_FMT_GBRP16;
737  f->use32bit = 1;
738  }
739  else if (f->avctx->bits_per_raw_sample == 16 && f->transparency) {
740  f->avctx->pix_fmt = AV_PIX_FMT_GBRAP16;
741  f->use32bit = 1;
742  }
743  } else {
744  av_log(f->avctx, AV_LOG_ERROR, "colorspace not supported\n");
745  return AVERROR(ENOSYS);
746  }
747  if (f->avctx->pix_fmt == AV_PIX_FMT_NONE) {
748  av_log(f->avctx, AV_LOG_ERROR, "format not supported\n");
749  return AVERROR(ENOSYS);
750  }
751 
752  ff_dlog(f->avctx, "%d %d %d\n",
753  f->chroma_h_shift, f->chroma_v_shift, f->avctx->pix_fmt);
754  if (f->version < 2) {
755  context_count = read_quant_tables(c, f->quant_table);
756  if (context_count < 0) {
757  av_log(f->avctx, AV_LOG_ERROR, "read_quant_table error\n");
758  return AVERROR_INVALIDDATA;
759  }
760  f->slice_count = f->max_slice_count;
761  } else if (f->version < 3) {
762  f->slice_count = get_symbol(c, state, 0);
763  } else {
764  const uint8_t *p = c->bytestream_end;
765  for (f->slice_count = 0;
766  f->slice_count < MAX_SLICES && 3 + 5*!!f->ec < p - c->bytestream_start;
767  f->slice_count++) {
768  int trailer = 3 + 5*!!f->ec;
769  int size = AV_RB24(p-trailer);
770  if (size + trailer > p - c->bytestream_start)
771  break;
772  p -= size + trailer;
773  }
774  }
775  if (f->slice_count > (unsigned)MAX_SLICES || f->slice_count <= 0 || f->slice_count > f->max_slice_count) {
776  av_log(f->avctx, AV_LOG_ERROR, "slice count %d is invalid (max=%d)\n", f->slice_count, f->max_slice_count);
777  return AVERROR_INVALIDDATA;
778  }
779 
780  for (j = 0; j < f->slice_count; j++) {
781  FFV1Context *fs = f->slice_context[j];
782  fs->ac = f->ac;
783  fs->packed_at_lsb = f->packed_at_lsb;
784 
785  fs->slice_damaged = 0;
786 
787  if (f->version == 2) {
788  int sx = get_symbol(c, state, 0);
789  int sy = get_symbol(c, state, 0);
790  int sw = get_symbol(c, state, 0) + 1U;
791  int sh = get_symbol(c, state, 0) + 1U;
792 
793  if (sx < 0 || sy < 0 || sw <= 0 || sh <= 0)
794  return AVERROR_INVALIDDATA;
795  if (sx > f->num_h_slices - sw || sy > f->num_v_slices - sh)
796  return AVERROR_INVALIDDATA;
797 
798  fs->slice_x = sx * (int64_t)f->width / f->num_h_slices;
799  fs->slice_y = sy * (int64_t)f->height / f->num_v_slices;
800  fs->slice_width = (sx + sw) * (int64_t)f->width / f->num_h_slices - fs->slice_x;
801  fs->slice_height = (sy + sh) * (int64_t)f->height / f->num_v_slices - fs->slice_y;
802 
803  av_assert0((unsigned)fs->slice_width <= f->width &&
804  (unsigned)fs->slice_height <= f->height);
805  av_assert0 ( (unsigned)fs->slice_x + (uint64_t)fs->slice_width <= f->width
806  && (unsigned)fs->slice_y + (uint64_t)fs->slice_height <= f->height);
807  }
808 
809  for (i = 0; i < f->plane_count; i++) {
810  PlaneContext *const p = &fs->plane[i];
811 
812  if (f->version == 2) {
813  int idx = get_symbol(c, state, 0);
814  if (idx >= (unsigned)f->quant_table_count) {
815  av_log(f->avctx, AV_LOG_ERROR,
816  "quant_table_index out of range\n");
817  return AVERROR_INVALIDDATA;
818  }
819  p->quant_table_index = idx;
820  memcpy(p->quant_table, f->quant_tables[idx],
821  sizeof(p->quant_table));
822  context_count = f->context_count[idx];
823  } else {
824  memcpy(p->quant_table, f->quant_table, sizeof(p->quant_table));
825  }
826 
827  if (f->version <= 2) {
828  av_assert0(context_count >= 0);
829  if (p->context_count < context_count) {
830  av_freep(&p->state);
831  av_freep(&p->vlc_state);
832  }
833  p->context_count = context_count;
834  }
835  }
836  }
837  return 0;
838 }
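/*
 * read_header() runs on keyframes.  For version < 2 streams every global
 * parameter is repeated here; newer streams get them from read_extra_header()
 * and only per-frame state remains.  The pixel format is picked from
 * colorspace + bit depth + subsampling + alpha, and for version >= 3 the
 * slice count is recovered by walking the per-slice trailers (3-byte size
 * plus 5 more bytes when f->ec is set) backwards from the end of the buffer.
 */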
839 
840 static av_cold int decode_init(AVCodecContext *avctx)
841 {
842  FFV1Context *f = avctx->priv_data;
843  int ret;
844 
845  if ((ret = ff_ffv1_common_init(avctx)) < 0)
846  return ret;
847 
848  if (avctx->extradata_size > 0 && (ret = read_extra_header(f)) < 0)
849  return ret;
850 
851  if ((ret = ff_ffv1_init_slice_contexts(f)) < 0)
852  return ret;
853 
854  avctx->internal->allocate_progress = 1;
855 
856  return 0;
857 }
858 
859 static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame, AVPacket *avpkt)
860 {
861  uint8_t *buf = avpkt->data;
862  int buf_size = avpkt->size;
863  FFV1Context *f = avctx->priv_data;
864  RangeCoder *const c = &f->slice_context[0]->c;
865  int i, ret;
866  uint8_t keystate = 128;
867  uint8_t *buf_p;
868  AVFrame *p;
869 
870  if (f->last_picture.f)
871  ff_thread_release_buffer(avctx, &f->last_picture);
872  FFSWAP(ThreadFrame, f->picture, f->last_picture);
873 
874  f->cur = p = f->picture.f;
875 
876  if (f->version < 3 && avctx->field_order > AV_FIELD_PROGRESSIVE) {
877  /* we have interlaced material flagged in container */
878  p->interlaced_frame = 1;
879  if (avctx->field_order == AV_FIELD_TT || avctx->field_order == AV_FIELD_TB)
880  p->top_field_first = 1;
881  }
882 
883  f->avctx = avctx;
884  ff_init_range_decoder(c, buf, buf_size);
885  ff_build_rac_states(c, 0.05 * (1LL << 32), 256 - 8);
886 
887  p->pict_type = AV_PICTURE_TYPE_I; //FIXME I vs. P
888  if (get_rac(c, &keystate)) {
889  p->key_frame = 1;
890  f->key_frame_ok = 0;
891  if ((ret = read_header(f)) < 0)
892  return ret;
893  f->key_frame_ok = 1;
894  } else {
895  if (!f->key_frame_ok) {
896  av_log(avctx, AV_LOG_ERROR,
897  "Cannot decode non-keyframe without valid keyframe\n");
898  return AVERROR_INVALIDDATA;
899  }
900  p->key_frame = 0;
901  }
902 
903  if ((ret = ff_thread_get_buffer(avctx, &f->picture, AV_GET_BUFFER_FLAG_REF)) < 0)
904  return ret;
905 
906  if (avctx->debug & FF_DEBUG_PICT_INFO)
907  av_log(avctx, AV_LOG_DEBUG, "ver:%d keyframe:%d coder:%d ec:%d slices:%d bps:%d\n",
908  f->version, p->key_frame, f->ac, f->ec, f->slice_count, f->avctx->bits_per_raw_sample);
909 
910  ff_thread_finish_setup(avctx);
911 
912  buf_p = buf + buf_size;
913  for (i = f->slice_count - 1; i >= 0; i--) {
914  FFV1Context *fs = f->slice_context[i];
915  int trailer = 3 + 5*!!f->ec;
916  int v;
917 
918  if (i || f->version > 2) {
919  if (trailer > buf_p - buf) v = INT_MAX;
920  else v = AV_RB24(buf_p-trailer) + trailer;
921  } else v = buf_p - c->bytestream_start;
922  if (buf_p - c->bytestream_start < v) {
923  av_log(avctx, AV_LOG_ERROR, "Slice pointer chain broken\n");
924  ff_thread_report_progress(&f->picture, INT_MAX, 0);
925  return AVERROR_INVALIDDATA;
926  }
927  buf_p -= v;
928 
929  if (f->ec) {
930  unsigned crc = av_crc(av_crc_get_table(AV_CRC_32_IEEE), 0, buf_p, v);
931  if (crc) {
932  int64_t ts = avpkt->pts != AV_NOPTS_VALUE ? avpkt->pts : avpkt->dts;
933  av_log(f->avctx, AV_LOG_ERROR, "slice CRC mismatch %X!", crc);
934  if (ts != AV_NOPTS_VALUE && avctx->pkt_timebase.num) {
935  av_log(f->avctx, AV_LOG_ERROR, "at %f seconds\n", ts*av_q2d(avctx->pkt_timebase));
936  } else if (ts != AV_NOPTS_VALUE) {
937  av_log(f->avctx, AV_LOG_ERROR, "at %"PRId64"\n", ts);
938  } else {
939  av_log(f->avctx, AV_LOG_ERROR, "\n");
940  }
941  fs->slice_damaged = 1;
942  }
943  if (avctx->debug & FF_DEBUG_PICT_INFO) {
944  av_log(avctx, AV_LOG_DEBUG, "slice %d, CRC: 0x%08"PRIX32"\n", i, AV_RB32(buf_p + v - 4));
945  }
946  }
947 
948  if (i) {
949  ff_init_range_decoder(&fs->c, buf_p, v);
950  } else
951  fs->c.bytestream_end = buf_p + v;
952 
953  fs->avctx = avctx;
954  fs->cur = p;
955  }
956 
957  avctx->execute(avctx,
958  decode_slice,
959  &f->slice_context[0],
960  NULL,
961  f->slice_count,
962  sizeof(void*));
963 
964  for (i = f->slice_count - 1; i >= 0; i--) {
965  FFV1Context *fs = f->slice_context[i];
966  int j;
967  if (fs->slice_damaged && f->last_picture.f->data[0]) {
968  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
969  const uint8_t *src[4];
970  uint8_t *dst[4];
971  ff_thread_await_progress(&f->last_picture, INT_MAX, 0);
972  for (j = 0; j < desc->nb_components; j++) {
973  int pixshift = desc->comp[j].depth > 8;
974  int sh = (j == 1 || j == 2) ? f->chroma_h_shift : 0;
975  int sv = (j == 1 || j == 2) ? f->chroma_v_shift : 0;
976  dst[j] = p->data[j] + p->linesize[j] *
977  (fs->slice_y >> sv) + ((fs->slice_x >> sh) << pixshift);
978  src[j] = f->last_picture.f->data[j] + f->last_picture.f->linesize[j] *
979  (fs->slice_y >> sv) + ((fs->slice_x >> sh) << pixshift);
980 
981  }
982  if (desc->flags & AV_PIX_FMT_FLAG_PAL ||
983  desc->flags & FF_PSEUDOPAL) {
984  dst[1] = p->data[1];
985  src[1] = f->last_picture.f->data[1];
986  }
987  av_image_copy(dst, p->linesize, src,
988  f->last_picture.f->linesize,
989  avctx->pix_fmt,
990  fs->slice_width,
991  fs->slice_height);
992  }
993  }
994  ff_thread_report_progress(&f->picture, INT_MAX, 0);
995 
996  f->picture_number++;
997 
998  if (f->last_picture.f)
999  ff_thread_release_buffer(avctx, &f->last_picture);
1000  f->cur = NULL;
1001  if ((ret = av_frame_ref(data, f->picture.f)) < 0)
1002  return ret;
1003 
1004  *got_frame = 1;
1005 
1006  return buf_size;
1007 }
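/*
 * decode_frame() locates the slices from the back of the packet: each slice
 * ends with its coded size (and a CRC checked here when f->ec is set), so
 * slices are found in reverse order, decoded in parallel via avctx->execute()
 * / decode_slice(), and any slice flagged as damaged is concealed by copying
 * the co-located rectangle from the previous picture.
 */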
1008 
1009 #if HAVE_THREADS
1010 static int init_thread_copy(AVCodecContext *avctx)
1011 {
1012  FFV1Context *f = avctx->priv_data;
1013  int i, ret;
1014 
1015  f->picture.f = NULL;
1016  f->last_picture.f = NULL;
1017  f->sample_buffer = NULL;
1018  f->max_slice_count = 0;
1019  f->slice_count = 0;
1020 
1021  for (i = 0; i < f->quant_table_count; i++) {
1022  av_assert0(f->version > 1);
1023  f->initial_states[i] = av_memdup(f->initial_states[i],
1024  f->context_count[i] * sizeof(*f->initial_states[i]));
1025  }
1026 
1027  f->picture.f = av_frame_alloc();
1028  f->last_picture.f = av_frame_alloc();
1029 
1030  if ((ret = ff_ffv1_init_slice_contexts(f)) < 0)
1031  return ret;
1032 
1033  return 0;
1034 }
1035 #endif
1036 
1037 static void copy_fields(FFV1Context *fsdst, FFV1Context *fssrc, FFV1Context *fsrc)
1038 {
1039  fsdst->version = fsrc->version;
1040  fsdst->micro_version = fsrc->micro_version;
1041  fsdst->chroma_planes = fsrc->chroma_planes;
1042  fsdst->chroma_h_shift = fsrc->chroma_h_shift;
1043  fsdst->chroma_v_shift = fsrc->chroma_v_shift;
1044  fsdst->transparency = fsrc->transparency;
1045  fsdst->plane_count = fsrc->plane_count;
1046  fsdst->ac = fsrc->ac;
1047  fsdst->colorspace = fsrc->colorspace;
1048 
1049  fsdst->ec = fsrc->ec;
1050  fsdst->intra = fsrc->intra;
1051  fsdst->slice_damaged = fssrc->slice_damaged;
1052  fsdst->key_frame_ok = fsrc->key_frame_ok;
1053 
1054  fsdst->bits_per_raw_sample = fsrc->bits_per_raw_sample;
1055  fsdst->packed_at_lsb = fsrc->packed_at_lsb;
1056  fsdst->slice_count = fsrc->slice_count;
1057  if (fsrc->version<3){
1058  fsdst->slice_x = fssrc->slice_x;
1059  fsdst->slice_y = fssrc->slice_y;
1060  fsdst->slice_width = fssrc->slice_width;
1061  fsdst->slice_height = fssrc->slice_height;
1062  }
1063 }
1064 
1065 #if HAVE_THREADS
1066 static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
1067 {
1068  FFV1Context *fsrc = src->priv_data;
1069  FFV1Context *fdst = dst->priv_data;
1070  int i, ret;
1071 
1072  if (dst == src)
1073  return 0;
1074 
1075  {
1076  ThreadFrame picture = fdst->picture, last_picture = fdst->last_picture;
1077  uint8_t (*initial_states[MAX_QUANT_TABLES])[32];
1078  struct FFV1Context *slice_context[MAX_SLICES];
1079  memcpy(initial_states, fdst->initial_states, sizeof(fdst->initial_states));
1080  memcpy(slice_context, fdst->slice_context , sizeof(fdst->slice_context));
1081 
1082  memcpy(fdst, fsrc, sizeof(*fdst));
1083  memcpy(fdst->initial_states, initial_states, sizeof(fdst->initial_states));
1084  memcpy(fdst->slice_context, slice_context , sizeof(fdst->slice_context));
1085  fdst->picture = picture;
1086  fdst->last_picture = last_picture;
1087  for (i = 0; i<fdst->num_h_slices * fdst->num_v_slices; i++) {
1088  FFV1Context *fssrc = fsrc->slice_context[i];
1089  FFV1Context *fsdst = fdst->slice_context[i];
1090  copy_fields(fsdst, fssrc, fsrc);
1091  }
1092  av_assert0(!fdst->plane[0].state);
1093  av_assert0(!fdst->sample_buffer);
1094  }
1095 
1096  av_assert1(fdst->max_slice_count == fsrc->max_slice_count);
1097 
1098 
1099  ff_thread_release_buffer(dst, &fdst->picture);
1100  if (fsrc->picture.f->data[0]) {
1101  if ((ret = ff_thread_ref_frame(&fdst->picture, &fsrc->picture)) < 0)
1102  return ret;
1103  }
1104 
1105  fdst->fsrc = fsrc;
1106 
1107  return 0;
1108 }
1109 #endif
1110 
1111 AVCodec ff_ffv1_decoder = {
1112  .name = "ffv1",
1113  .long_name = NULL_IF_CONFIG_SMALL("FFmpeg video codec #1"),
1114  .type = AVMEDIA_TYPE_VIDEO,
1115  .id = AV_CODEC_ID_FFV1,
1116  .priv_data_size = sizeof(FFV1Context),
1117  .init = decode_init,
1118  .close = ff_ffv1_close,
1119  .decode = decode_frame,
1120  .init_thread_copy = ONLY_IF_THREADS_ENABLED(init_thread_copy),
1121  .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
1122  .capabilities = AV_CODEC_CAP_DR1 /*| AV_CODEC_CAP_DRAW_HORIZ_BAND*/ |
1123  AV_CODEC_CAP_FRAME_THREADS | AV_CODEC_CAP_SLICE_THREADS,
1124  .caps_internal = FF_CODEC_CAP_INIT_CLEANUP
1125 };
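Below is a minimal usage sketch, not part of ffv1dec.c, showing how the AVCodec declared above is reached through the public libavformat/libavcodec API of this release; the input path and the bare-bones error handling are placeholder assumptions. avcodec_open2() ends up in decode_init() above, and each packet handed to avcodec_send_packet() is decoded by decode_frame().

#include <stdio.h>
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>

int main(int argc, char **argv)
{
    AVFormatContext *ic = NULL;
    AVCodecContext *dec;
    AVCodec *codec;
    AVFrame *frame = av_frame_alloc();
    AVPacket pkt;
    int video = -1;
    unsigned i;

    if (argc < 2 || avformat_open_input(&ic, argv[1], NULL, NULL) < 0 ||
        avformat_find_stream_info(ic, NULL) < 0)
        return 1;

    /* pick the first FFV1 stream in the container */
    for (i = 0; i < ic->nb_streams; i++)
        if (ic->streams[i]->codecpar->codec_id == AV_CODEC_ID_FFV1)
            video = i;
    if (video < 0)
        return 1;

    codec = avcodec_find_decoder(AV_CODEC_ID_FFV1);   /* resolves to ff_ffv1_decoder */
    dec   = avcodec_alloc_context3(codec);
    avcodec_parameters_to_context(dec, ic->streams[video]->codecpar);
    if (avcodec_open2(dec, codec, NULL) < 0)          /* runs decode_init() */
        return 1;

    while (av_read_frame(ic, &pkt) >= 0) {
        if (pkt.stream_index == video) {
            avcodec_send_packet(dec, &pkt);           /* feeds decode_frame() */
            while (avcodec_receive_frame(dec, frame) >= 0)
                printf("frame %d: %dx%d pix_fmt %d keyframe %d\n",
                       dec->frame_number, frame->width, frame->height,
                       frame->format, frame->key_frame);
        }
        av_packet_unref(&pkt);
    }

    avcodec_free_context(&dec);
    avformat_close_input(&ic);
    av_frame_free(&frame);
    return 0;
}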