FFmpeg
mediacodecdec_common.c
1 /*
2  * Android MediaCodec decoder
3  *
4  * Copyright (c) 2015-2016 Matthieu Bouron <matthieu.bouron stupeflix.com>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 #include <string.h>
24 #include <sys/types.h>
25 
26 #include "libavutil/common.h"
27 #include "libavutil/hwcontext_mediacodec.h"
28 #include "libavutil/mem.h"
29 #include "libavutil/log.h"
30 #include "libavutil/pixfmt.h"
31 #include "libavutil/time.h"
32 #include "libavutil/timestamp.h"
33 
34 #include "avcodec.h"
35 #include "decode.h"
36 
37 #include "mediacodec.h"
38 #include "mediacodec_surface.h"
39 #include "mediacodec_sw_buffer.h"
40 #include "mediacodec_wrapper.h"
41 #include "mediacodecdec_common.h"
42 
43 /**
44  * OMX.k3.video.decoder.avc, OMX.NVIDIA.* OMX.SEC.avc.dec and OMX.google
45  * codec workarounds used in various places are taken from the Gstreamer
46  * project.
47  *
48  * Gstreamer references:
49  * https://cgit.freedesktop.org/gstreamer/gst-plugins-bad/tree/sys/androidmedia/
50  *
51  * Gstreamer copyright notice:
52  *
53  * Copyright (C) 2012, Collabora Ltd.
54  * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
55  *
56  * Copyright (C) 2012, Rafaël Carré <funman@videolan.org>
57  *
58  * Copyright (C) 2015, Sebastian Dröge <sebastian@centricular.com>
59  *
60  * Copyright (C) 2014-2015, Collabora Ltd.
61  * Author: Matthieu Bouron <matthieu.bouron@collabora.com>
62  *
63  * Copyright (C) 2015, Edward Hervey
64  * Author: Edward Hervey <bilboed@gmail.com>
65  *
66  * Copyright (C) 2015, Matthew Waters <matthew@centricular.com>
67  *
68  * This library is free software; you can redistribute it and/or
69  * modify it under the terms of the GNU Lesser General Public
70  * License as published by the Free Software Foundation
71  * version 2.1 of the License.
72  *
73  * This library is distributed in the hope that it will be useful,
74  * but WITHOUT ANY WARRANTY; without even the implied warranty of
75  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
76  * Lesser General Public License for more details.
77  *
78  * You should have received a copy of the GNU Lesser General Public
79  * License along with this library; if not, write to the Free Software
80  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
81  *
82  */
83 
84 #define INPUT_DEQUEUE_TIMEOUT_US 8000
85 #define OUTPUT_DEQUEUE_TIMEOUT_US 8000
86 #define OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US 1000000
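/* Editorial note (not part of the original file): INPUT_DEQUEUE_TIMEOUT_US
 * bounds ff_AMediaCodec_dequeueInputBuffer() in ff_mediacodec_dec_send() when
 * the caller asks to wait, OUTPUT_DEQUEUE_TIMEOUT_US is the default output
 * poll interval in ff_mediacodec_dec_receive(), and the much larger
 * OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US is only used while draining at end of
 * stream. */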
87 
88 enum {
89  COLOR_FormatYUV420Planar = 0x13,
90  COLOR_FormatYUV420SemiPlanar = 0x15,
91  COLOR_FormatYCbYCr = 0x19,
92  COLOR_FormatAndroidOpaque = 0x7F000789,
93  COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00,
94  COLOR_QCOM_FormatYUV420SemiPlanar32m = 0x7fa30c04,
95  COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka = 0x7fa30c03,
96  COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100,
97  COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced = 0x7f000001,
98 };
99 
100 static const struct {
101 
102  int color_format;
103  enum AVPixelFormat pix_fmt;
104 
105 } color_formats[] = {
106 
107  { COLOR_FormatYUV420Planar, AV_PIX_FMT_YUV420P },
108  { COLOR_FormatYUV420SemiPlanar, AV_PIX_FMT_NV12 },
109  { COLOR_QCOM_FormatYUV420SemiPlanar, AV_PIX_FMT_NV12 },
110  { COLOR_QCOM_FormatYUV420SemiPlanar32m, AV_PIX_FMT_NV12 },
111  { COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka, AV_PIX_FMT_NV12 },
112  { COLOR_TI_FormatYUV420PackedSemiPlanar, AV_PIX_FMT_NV12 },
113  { COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced, AV_PIX_FMT_NV12 },
114  { 0 }
115 };
116 
117 static enum AVPixelFormat mcdec_map_color_format(AVCodecContext *avctx,
118  MediaCodecDecContext *s,
119  int color_format)
120 {
121  int i;
122  enum AVPixelFormat ret = AV_PIX_FMT_NONE;
123 
124  if (s->surface) {
125  return AV_PIX_FMT_MEDIACODEC;
126  }
127 
128  if (!strcmp(s->codec_name, "OMX.k3.video.decoder.avc") && color_format == COLOR_FormatYCbYCr) {
129  s->color_format = color_format = COLOR_FormatYUV420SemiPlanar;
130  }
131 
132  for (i = 0; i < FF_ARRAY_ELEMS(color_formats); i++) {
133  if (color_formats[i].color_format == color_format) {
134  return color_formats[i].pix_fmt;
135  }
136  }
137 
138  av_log(avctx, AV_LOG_ERROR, "Output color format 0x%x (value=%d) is not supported\n",
139  color_format, color_format);
140 
141  return ret;
142 }
143 
144 static void ff_mediacodec_dec_ref(MediaCodecDecContext *s)
145 {
146  atomic_fetch_add(&s->refcount, 1);
147 }
148 
149 static void ff_mediacodec_dec_unref(MediaCodecDecContext *s)
150 {
151  if (!s)
152  return;
153 
154  if (atomic_fetch_sub(&s->refcount, 1) == 1) {
155  if (s->codec) {
156  ff_AMediaCodec_delete(s->codec);
157  s->codec = NULL;
158  }
159 
160  if (s->format) {
161  ff_AMediaFormat_delete(s->format);
162  s->format = NULL;
163  }
164 
165  if (s->surface) {
166  ff_mediacodec_surface_unref(s->surface, s->avctx);
167  s->surface = NULL;
168  }
169 
170  av_freep(&s->codec_name);
171  av_freep(&s);
172  }
173 }
174 
175 static void mediacodec_buffer_release(void *opaque, uint8_t *data)
176 {
177  AVMediaCodecBuffer *buffer = opaque;
178  MediaCodecDecContext *ctx = buffer->ctx;
179  int released = atomic_load(&buffer->released);
180 
181  if (!released && (ctx->delay_flush || buffer->serial == atomic_load(&ctx->serial))) {
182  atomic_fetch_sub(&ctx->hw_buffer_count, 1);
183  av_log(ctx->avctx, AV_LOG_DEBUG,
184  "Releasing output buffer %zd (%p) ts=%"PRId64" on free() [%d pending]\n",
185  buffer->index, buffer, buffer->pts, atomic_load(&ctx->hw_buffer_count));
186  ff_AMediaCodec_releaseOutputBuffer(ctx->codec, buffer->index, 0);
187  }
188 
189  ff_mediacodec_dec_unref(ctx);
190  av_freep(&buffer);
191 }
192 
193 static int mediacodec_wrap_hw_buffer(AVCodecContext *avctx,
194  MediaCodecDecContext *s,
195  ssize_t index,
196  FFAMediaCodecBufferInfo *info,
197  AVFrame *frame)
198 {
199  int ret = 0;
200  int status = 0;
201  AVMediaCodecBuffer *buffer = NULL;
202 
203  frame->buf[0] = NULL;
204  frame->width = avctx->width;
205  frame->height = avctx->height;
206  frame->format = avctx->pix_fmt;
207  frame->sample_aspect_ratio = avctx->sample_aspect_ratio;
208 
209  if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
210  frame->pts = av_rescale_q(info->presentationTimeUs,
211  AV_TIME_BASE_Q,
212  avctx->pkt_timebase);
213  } else {
214  frame->pts = info->presentationTimeUs;
215  }
216  frame->pkt_dts = AV_NOPTS_VALUE;
217  frame->color_range = avctx->color_range;
218  frame->color_primaries = avctx->color_primaries;
219  frame->color_trc = avctx->color_trc;
220  frame->colorspace = avctx->colorspace;
221 
222  buffer = av_mallocz(sizeof(AVMediaCodecBuffer));
223  if (!buffer) {
224  ret = AVERROR(ENOMEM);
225  goto fail;
226  }
227 
228  atomic_init(&buffer->released, 0);
229 
230  frame->buf[0] = av_buffer_create(NULL,
231  0,
232  mediacodec_buffer_release,
233  buffer,
234  AV_BUFFER_FLAG_READONLY);
235 
236  if (!frame->buf[0]) {
237  ret = AVERROR(ENOMEM);
238  goto fail;
239 
240  }
241 
242  buffer->ctx = s;
243  buffer->serial = atomic_load(&s->serial);
244  ff_mediacodec_dec_ref(s);
245 
246  buffer->index = index;
247  buffer->pts = info->presentationTimeUs;
248 
249  frame->data[3] = (uint8_t *)buffer;
250 
251  atomic_fetch_add(&s->hw_buffer_count, 1);
252  av_log(avctx, AV_LOG_DEBUG,
253  "Wrapping output buffer %zd (%p) ts=%"PRId64" [%d pending]\n",
254  buffer->index, buffer, buffer->pts, atomic_load(&s->hw_buffer_count));
255 
256  return 0;
257 fail:
258  av_freep(&buffer);
259  status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
260  if (status < 0) {
261  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
262  ret = AVERROR_EXTERNAL;
263  }
264 
265  return ret;
266 }
267 
268 static int mediacodec_wrap_sw_buffer(AVCodecContext *avctx,
269  MediaCodecDecContext *s,
270  uint8_t *data,
271  size_t size,
272  ssize_t index,
273  FFAMediaCodecBufferInfo *info,
274  AVFrame *frame)
275 {
276  int ret = 0;
277  int status = 0;
278 
279  frame->width = avctx->width;
280  frame->height = avctx->height;
281  frame->format = avctx->pix_fmt;
282 
283  /* MediaCodec buffers need to be copied to our own refcounted buffers
284  * because the flush command invalidates all input and output buffers.
285  */
286  if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) {
287  av_log(avctx, AV_LOG_ERROR, "Could not allocate buffer\n");
288  goto done;
289  }
290 
291  /* Override frame->pkt_pts as ff_get_buffer will override its value based
292  * on the last avpacket received which is not in sync with the frame:
293  * * N avpackets can be pushed before 1 frame is actually returned
294  * * 0-sized avpackets are pushed to flush remaining frames at EOS */
295  if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
296  frame->pts = av_rescale_q(info->presentationTimeUs,
297  AV_TIME_BASE_Q,
298  avctx->pkt_timebase);
299  } else {
300  frame->pts = info->presentationTimeUs;
301  }
302  frame->pkt_dts = AV_NOPTS_VALUE;
303 
304  av_log(avctx, AV_LOG_TRACE,
305  "Frame: width=%d stride=%d height=%d slice-height=%d "
306  "crop-top=%d crop-bottom=%d crop-left=%d crop-right=%d encoder=%s "
307  "destination linesizes=%d,%d,%d\n" ,
308  avctx->width, s->stride, avctx->height, s->slice_height,
309  s->crop_top, s->crop_bottom, s->crop_left, s->crop_right, s->codec_name,
310  frame->linesize[0], frame->linesize[1], frame->linesize[2]);
311 
312  switch (s->color_format) {
313  case COLOR_FormatYUV420Planar:
314  ff_mediacodec_sw_buffer_copy_yuv420_planar(avctx, s, data, size, info, frame);
315  break;
316  case COLOR_FormatYUV420SemiPlanar:
317  case COLOR_QCOM_FormatYUV420SemiPlanar:
318  case COLOR_QCOM_FormatYUV420SemiPlanar32m:
319  ff_mediacodec_sw_buffer_copy_yuv420_semi_planar(avctx, s, data, size, info, frame);
320  break;
321  case COLOR_TI_FormatYUV420PackedSemiPlanar:
322  case COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced:
323  ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar(avctx, s, data, size, info, frame);
324  break;
325  case COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka:
326  ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka(avctx, s, data, size, info, frame);
327  break;
328  default:
329  av_log(avctx, AV_LOG_ERROR, "Unsupported color format 0x%x (value=%d)\n",
330  s->color_format, s->color_format);
331  ret = AVERROR(EINVAL);
332  goto done;
333  }
334 
335  ret = 0;
336 done:
337  status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
338  if (status < 0) {
339  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
340  ret = AVERROR_EXTERNAL;
341  }
342 
343  return ret;
344 }
345 
346 #define AMEDIAFORMAT_GET_INT32(name, key, mandatory) do { \
347  int32_t value = 0; \
348  if (ff_AMediaFormat_getInt32(s->format, key, &value)) { \
349  (name) = value; \
350  } else if (mandatory) { \
351  av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n", key, format); \
352  ret = AVERROR_EXTERNAL; \
353  goto fail; \
354  } \
355 } while (0) \
356 
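/* Editorial note (not part of the original file): for reference, the macro
 * above expands AMEDIAFORMAT_GET_INT32(s->width, "width", 1) to roughly:
 *
 *     do {
 *         int32_t value = 0;
 *         if (ff_AMediaFormat_getInt32(s->format, "width", &value)) {
 *             (s->width) = value;
 *         } else if (1) {
 *             av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n",
 *                    "width", format);
 *             ret = AVERROR_EXTERNAL;
 *             goto fail;
 *         }
 *     } while (0);
 *
 * so a missing mandatory key aborts the whole parse, while a missing optional
 * key (mandatory == 0) leaves the destination untouched. */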
357 static int mediacodec_dec_parse_format(AVCodecContext *avctx, MediaCodecDecContext *s)
358 {
359  int ret = 0;
360  int width = 0;
361  int height = 0;
362  int color_range = 0;
363  int color_standard = 0;
364  int color_transfer = 0;
365  char *format = NULL;
366 
367  if (!s->format) {
368  av_log(avctx, AV_LOG_ERROR, "Output MediaFormat is not set\n");
369  return AVERROR(EINVAL);
370  }
371 
372  format = ff_AMediaFormat_toString(s->format);
373  if (!format) {
374  return AVERROR_EXTERNAL;
375  }
376  av_log(avctx, AV_LOG_DEBUG, "Parsing MediaFormat %s\n", format);
377 
378  /* Mandatory fields */
379  AMEDIAFORMAT_GET_INT32(s->width, "width", 1);
380  AMEDIAFORMAT_GET_INT32(s->height, "height", 1);
381 
382  AMEDIAFORMAT_GET_INT32(s->stride, "stride", 0);
383  s->stride = s->stride > 0 ? s->stride : s->width;
384 
385  AMEDIAFORMAT_GET_INT32(s->slice_height, "slice-height", 0);
386 
387  if (strstr(s->codec_name, "OMX.Nvidia.") && s->slice_height == 0) {
388  s->slice_height = FFALIGN(s->height, 16);
389  } else if (strstr(s->codec_name, "OMX.SEC.avc.dec")) {
390  s->slice_height = avctx->height;
391  s->stride = avctx->width;
392  } else if (s->slice_height == 0) {
393  s->slice_height = s->height;
394  }
395 
396  AMEDIAFORMAT_GET_INT32(s->color_format, "color-format", 1);
397  avctx->pix_fmt = mcdec_map_color_format(avctx, s, s->color_format);
398  if (avctx->pix_fmt == AV_PIX_FMT_NONE) {
399  av_log(avctx, AV_LOG_ERROR, "Output color format is not supported\n");
400  ret = AVERROR(EINVAL);
401  goto fail;
402  }
403 
404  /* Optional fields */
405  AMEDIAFORMAT_GET_INT32(s->crop_top, "crop-top", 0);
406  AMEDIAFORMAT_GET_INT32(s->crop_bottom, "crop-bottom", 0);
407  AMEDIAFORMAT_GET_INT32(s->crop_left, "crop-left", 0);
408  AMEDIAFORMAT_GET_INT32(s->crop_right, "crop-right", 0);
409 
410  // Try "crop" for NDK
411  if (!(s->crop_right && s->crop_bottom) && s->use_ndk_codec)
412  ff_AMediaFormat_getRect(s->format, "crop", &s->crop_left, &s->crop_top, &s->crop_right, &s->crop_bottom);
413 
414  if (s->crop_right && s->crop_bottom) {
415  width = s->crop_right + 1 - s->crop_left;
416  height = s->crop_bottom + 1 - s->crop_top;
417  } else {
418  /* TODO: NDK MediaFormat should try getRect() first.
419  * Try crop-width/crop-height, it works on NVIDIA Shield.
420  */
421  AMEDIAFORMAT_GET_INT32(width, "crop-width", 0);
422  AMEDIAFORMAT_GET_INT32(height, "crop-height", 0);
423  }
424  if (!width || !height) {
425  width = s->width;
426  height = s->height;
427  }
428 
429  AMEDIAFORMAT_GET_INT32(s->display_width, "display-width", 0);
430  AMEDIAFORMAT_GET_INT32(s->display_height, "display-height", 0);
431 
432  if (s->display_width && s->display_height) {
433  AVRational sar = av_div_q(
434  (AVRational){ s->display_width, s->display_height },
435  (AVRational){ width, height });
436  ff_set_sar(avctx, sar);
437  }
438 
439  AMEDIAFORMAT_GET_INT32(color_range, "color-range", 0);
440  if (color_range)
441  avctx->color_range = ff_AMediaFormatColorRange_to_AVColorRange(color_range);
442 
443  AMEDIAFORMAT_GET_INT32(color_standard, "color-standard", 0);
444  if (color_standard) {
445  avctx->colorspace = ff_AMediaFormatColorStandard_to_AVColorSpace(color_standard);
446  avctx->color_primaries = ff_AMediaFormatColorStandard_to_AVColorPrimaries(color_standard);
447  }
448 
449  AMEDIAFORMAT_GET_INT32(color_transfer, "color-transfer", 0);
450  if (color_transfer)
451  avctx->color_trc = ff_AMediaFormatColorTransfer_to_AVColorTransfer(color_transfer);
452 
453  av_log(avctx, AV_LOG_INFO,
454  "Output crop parameters top=%d bottom=%d left=%d right=%d, "
455  "resulting dimensions width=%d height=%d\n",
456  s->crop_top, s->crop_bottom, s->crop_left, s->crop_right,
457  width, height);
458 
459  av_freep(&format);
460  return ff_set_dimensions(avctx, width, height);
461 fail:
462  av_freep(&format);
463  return ret;
464 }
465 
466 static int mediacodec_dec_flush_codec(AVCodecContext *avctx, MediaCodecDecContext *s)
467 {
468  FFAMediaCodec *codec = s->codec;
469  int status;
470 
471  s->output_buffer_count = 0;
472 
473  s->draining = 0;
474  s->flushing = 0;
475  s->eos = 0;
476  atomic_fetch_add(&s->serial, 1);
477  atomic_init(&s->hw_buffer_count, 0);
478  s->current_input_buffer = -1;
479 
480  status = ff_AMediaCodec_flush(codec);
481  if (status < 0) {
482  av_log(avctx, AV_LOG_ERROR, "Failed to flush codec\n");
483  return AVERROR_EXTERNAL;
484  }
485 
486  return 0;
487 }
488 
489 int ff_mediacodec_dec_init(AVCodecContext *avctx, MediaCodecDecContext *s,
490  const char *mime, FFAMediaFormat *format)
491 {
492  int ret = 0;
493  int status;
494  int profile;
495 
496  enum AVPixelFormat pix_fmt;
497  static const enum AVPixelFormat pix_fmts[] = {
498  AV_PIX_FMT_MEDIACODEC,
499  AV_PIX_FMT_NONE,
500  };
501 
502  s->avctx = avctx;
503  atomic_init(&s->refcount, 1);
504  atomic_init(&s->hw_buffer_count, 0);
505  atomic_init(&s->serial, 1);
506  s->current_input_buffer = -1;
507 
508  pix_fmt = ff_get_format(avctx, pix_fmts);
509  if (pix_fmt == AV_PIX_FMT_MEDIACODEC) {
510  AVMediaCodecContext *user_ctx = avctx->hwaccel_context;
511 
512  if (avctx->hw_device_ctx) {
513  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)(avctx->hw_device_ctx->data);
514  if (device_ctx->type == AV_HWDEVICE_TYPE_MEDIACODEC) {
515  if (device_ctx->hwctx) {
516  AVMediaCodecDeviceContext *mediacodec_ctx = (AVMediaCodecDeviceContext *)device_ctx->hwctx;
517  s->surface = ff_mediacodec_surface_ref(mediacodec_ctx->surface, mediacodec_ctx->native_window, avctx);
518  av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
519  }
520  }
521  }
522 
523  if (!s->surface && user_ctx && user_ctx->surface) {
524  s->surface = ff_mediacodec_surface_ref(user_ctx->surface, NULL, avctx);
525  av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
526  }
527  }
528 
529  profile = ff_AMediaCodecProfile_getProfileFromAVCodecContext(avctx);
530  if (profile < 0) {
531  av_log(avctx, AV_LOG_WARNING, "Unsupported or unknown profile\n");
532  }
533 
534  s->codec_name = ff_AMediaCodecList_getCodecNameByType(mime, profile, 0, avctx);
535  if (!s->codec_name) {
536  // getCodecNameByType() can fail due to missing JVM, while NDK
537  // mediacodec can be used without JVM.
538  if (!s->use_ndk_codec) {
539  ret = AVERROR_EXTERNAL;
540  goto fail;
541  }
542  av_log(avctx, AV_LOG_INFO, "Failed to getCodecNameByType\n");
543  } else {
544  av_log(avctx, AV_LOG_DEBUG, "Found decoder %s\n", s->codec_name);
545  }
546 
547  if (s->codec_name)
548  s->codec = ff_AMediaCodec_createCodecByName(s->codec_name, s->use_ndk_codec);
549  else {
550  s->codec = ff_AMediaCodec_createDecoderByType(mime, s->use_ndk_codec);
551  if (s->codec) {
552  s->codec_name = ff_AMediaCodec_getName(s->codec);
553  if (!s->codec_name)
554  s->codec_name = av_strdup(mime);
555  }
556  }
557  if (!s->codec) {
558  av_log(avctx, AV_LOG_ERROR, "Failed to create media decoder for type %s and name %s\n", mime, s->codec_name);
559  ret = AVERROR_EXTERNAL;
560  goto fail;
561  }
562 
563  status = ff_AMediaCodec_configure(s->codec, format, s->surface, NULL, 0);
564  if (status < 0) {
565  char *desc = ff_AMediaFormat_toString(format);
566  av_log(avctx, AV_LOG_ERROR,
567  "Failed to configure codec %s (status = %d) with format %s\n",
568  s->codec_name, status, desc);
569  av_freep(&desc);
570 
571  ret = AVERROR_EXTERNAL;
572  goto fail;
573  }
574 
575  status = ff_AMediaCodec_start(s->codec);
576  if (status < 0) {
577  char *desc = ff_AMediaFormat_toString(format);
578  av_log(avctx, AV_LOG_ERROR,
579  "Failed to start codec %s (status = %d) with format %s\n",
580  s->codec_name, status, desc);
581  av_freep(&desc);
582  ret = AVERROR_EXTERNAL;
583  goto fail;
584  }
585 
586  s->format = ff_AMediaCodec_getOutputFormat(s->codec);
587  if (s->format) {
588  if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
589  av_log(avctx, AV_LOG_ERROR,
590  "Failed to configure context\n");
591  goto fail;
592  }
593  }
594 
595  av_log(avctx, AV_LOG_DEBUG, "MediaCodec %p started successfully\n", s->codec);
596 
597  return 0;
598 
599 fail:
600  av_log(avctx, AV_LOG_ERROR, "MediaCodec %p failed to start\n", s->codec);
601  ff_mediacodec_dec_close(avctx, s);
602  return ret;
603 }
604 
605 int ff_mediacodec_dec_send(AVCodecContext *avctx, MediaCodecDecContext *s,
606  AVPacket *pkt, bool wait)
607 {
608  int offset = 0;
609  int need_draining = 0;
610  uint8_t *data;
611  size_t size;
612  FFAMediaCodec *codec = s->codec;
613  int status;
614  int64_t input_dequeue_timeout_us = wait ? INPUT_DEQUEUE_TIMEOUT_US : 0;
615  int64_t pts;
616 
617  if (s->flushing) {
618  av_log(avctx, AV_LOG_ERROR, "Decoder is flushing and cannot accept new buffer "
619  "until all output buffers have been released\n");
620  return AVERROR_EXTERNAL;
621  }
622 
623  if (pkt->size == 0) {
624  need_draining = 1;
625  }
626 
627  if (s->draining && s->eos) {
628  return AVERROR_EOF;
629  }
630 
631  while (offset < pkt->size || (need_draining && !s->draining)) {
632  ssize_t index = s->current_input_buffer;
633  if (index < 0) {
634  index = ff_AMediaCodec_dequeueInputBuffer(codec, input_dequeue_timeout_us);
635  if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
636  av_log(avctx, AV_LOG_TRACE, "No input buffer available, try again later\n");
637  break;
638  }
639 
640  if (index < 0) {
641  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue input buffer (status=%zd)\n", index);
642  return AVERROR_EXTERNAL;
643  }
644  }
645  s->current_input_buffer = -1;
646 
647  data = ff_AMediaCodec_getInputBuffer(codec, index, &size);
648  if (!data) {
649  av_log(avctx, AV_LOG_ERROR, "Failed to get input buffer\n");
650  return AVERROR_EXTERNAL;
651  }
652 
653  pts = pkt->pts;
654  if (pts == AV_NOPTS_VALUE) {
655  av_log(avctx, AV_LOG_WARNING, "Input packet is missing PTS\n");
656  pts = 0;
657  }
658  if (pts && avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
659  pts = av_rescale_q(pts, avctx->pkt_timebase, AV_TIME_BASE_Q);
660  }
661 
662  if (need_draining) {
663  int flags = ff_AMediaCodec_getBufferFlagEndOfStream(codec);
664 
665  av_log(avctx, AV_LOG_DEBUG, "Sending End Of Stream signal\n");
666 
667  status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, 0, pts, flags);
668  if (status < 0) {
669  av_log(avctx, AV_LOG_ERROR, "Failed to queue input empty buffer (status = %d)\n", status);
670  return AVERROR_EXTERNAL;
671  }
672 
673  av_log(avctx, AV_LOG_TRACE,
674  "Queued empty EOS input buffer %zd with flags=%d\n", index, flags);
675 
676  s->draining = 1;
677  return 0;
678  }
679 
680  size = FFMIN(pkt->size - offset, size);
681  memcpy(data, pkt->data + offset, size);
682  offset += size;
683 
684  status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, size, pts, 0);
685  if (status < 0) {
686  av_log(avctx, AV_LOG_ERROR, "Failed to queue input buffer (status = %d)\n", status);
687  return AVERROR_EXTERNAL;
688  }
689 
690  av_log(avctx, AV_LOG_TRACE,
691  "Queued input buffer %zd size=%zd ts=%"PRIi64"\n", index, size, pts);
692  }
693 
694  if (offset == 0)
695  return AVERROR(EAGAIN);
696  return offset;
697 }
698 
699 int ff_mediacodec_dec_receive(AVCodecContext *avctx, MediaCodecDecContext *s,
700  AVFrame *frame, bool wait)
701 {
702  int ret;
703  uint8_t *data;
704  ssize_t index;
705  size_t size;
706  FFAMediaCodec *codec = s->codec;
707  FFAMediaCodecBufferInfo info = { 0 };
708  int status;
709  int64_t output_dequeue_timeout_us = OUTPUT_DEQUEUE_TIMEOUT_US;
710 
711  if (s->draining && s->eos) {
712  return AVERROR_EOF;
713  }
714 
715  if (s->draining) {
716  /* If the codec is flushing or needs to be flushed, block for a fair
717  * amount of time to ensure we get a frame */
718  output_dequeue_timeout_us = OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US;
719  } else if (s->output_buffer_count == 0 || !wait) {
720  /* If the codec hasn't produced any frames, do not block so we
721  * can push data to it as fast as possible, and get the first
722  * frame */
723  output_dequeue_timeout_us = 0;
724  }
725 
726  index = ff_AMediaCodec_dequeueOutputBuffer(codec, &info, output_dequeue_timeout_us);
727  if (index >= 0) {
728  av_log(avctx, AV_LOG_TRACE, "Got output buffer %zd"
729  " offset=%" PRIi32 " size=%" PRIi32 " ts=%" PRIi64
730  " flags=%" PRIu32 "\n", index, info.offset, info.size,
731  info.presentationTimeUs, info.flags);
732 
733  if (info.flags & ff_AMediaCodec_getBufferFlagEndOfStream(codec)) {
734  s->eos = 1;
735  }
736 
737  if (info.size) {
738  if (s->surface) {
739  if ((ret = mediacodec_wrap_hw_buffer(avctx, s, index, &info, frame)) < 0) {
740  av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
741  return ret;
742  }
743  } else {
744  data = ff_AMediaCodec_getOutputBuffer(codec, index, &size);
745  if (!data) {
746  av_log(avctx, AV_LOG_ERROR, "Failed to get output buffer\n");
747  return AVERROR_EXTERNAL;
748  }
749 
750  if ((ret = mediacodec_wrap_sw_buffer(avctx, s, data, size, index, &info, frame)) < 0) {
751  av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
752  return ret;
753  }
754  }
755 
756  s->output_buffer_count++;
757  return 0;
758  } else {
759  status = ff_AMediaCodec_releaseOutputBuffer(codec, index, 0);
760  if (status < 0) {
761  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
762  }
763  }
764 
765  } else if (ff_AMediaCodec_infoOutputFormatChanged(codec, index)) {
766  char *format = NULL;
767 
768  if (s->format) {
769  status = ff_AMediaFormat_delete(s->format);
770  if (status < 0) {
771  av_log(avctx, AV_LOG_ERROR, "Failed to delete MediaFormat %p\n", s->format);
772  }
773  }
774 
775  s->format = ff_AMediaCodec_getOutputFormat(codec);
776  if (!s->format) {
777  av_log(avctx, AV_LOG_ERROR, "Failed to get output format\n");
778  return AVERROR_EXTERNAL;
779  }
780 
781  format = ff_AMediaFormat_toString(s->format);
782  if (!format) {
783  return AVERROR_EXTERNAL;
784  }
785  av_log(avctx, AV_LOG_INFO, "Output MediaFormat changed to %s\n", format);
786  av_freep(&format);
787 
788  if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
789  return ret;
790  }
791 
792  } else if (ff_AMediaCodec_infoOutputBuffersChanged(codec, index)) {
793  ff_AMediaCodec_cleanOutputBuffers(codec);
794  } else if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
795  if (s->draining) {
796  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer within %" PRIi64 "ms "
797  "while draining remaining frames, output will probably lack frames\n",
798  output_dequeue_timeout_us / 1000);
799  } else {
800  av_log(avctx, AV_LOG_TRACE, "No output buffer available, try again later\n");
801  }
802  } else {
803  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer (status=%zd)\n", index);
804  return AVERROR_EXTERNAL;
805  }
806 
807  if (s->draining && s->eos)
808  return AVERROR_EOF;
809  return AVERROR(EAGAIN);
810 }
811 
812 /*
813 * ff_mediacodec_dec_flush returns 0 if the flush cannot be performed on
814 * the codec (because the user retains frames). The codec stays in the
815 * flushing state.
816 *
817 * ff_mediacodec_dec_flush returns 1 if the flush can actually be
818 * performed on the codec. The codec leaves the flushing state and can
819 * process packets again.
820 *
821 * ff_mediacodec_dec_flush returns a negative value if an error has
822 * occurred.
823 */
824 int ff_mediacodec_dec_flush(AVCodecContext *avctx, MediaCodecDecContext *s)
825 {
826  if (!s->surface || !s->delay_flush || atomic_load(&s->refcount) == 1) {
827  int ret;
828 
829  /* No frames (holding a reference to the codec) are retained by the
830  * user, thus we can flush the codec and return accordingly */
831  if ((ret = mediacodec_dec_flush_codec(avctx, s)) < 0) {
832  return ret;
833  }
834 
835  return 1;
836  }
837 
838  s->flushing = 1;
839  return 0;
840 }
841 
842 int ff_mediacodec_dec_close(AVCodecContext *avctx, MediaCodecDecContext *s)
843 {
844  ff_mediacodec_dec_unref(s);
845 
846  return 0;
847 }
848 
849 int ff_mediacodec_dec_is_flushing(AVCodecContext *avctx, MediaCodecDecContext *s)
850 {
851  return s->flushing;
852 }
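
The send/receive pair above is what the MediaCodec decoder wrappers build their frame-returning callback on. As an illustration only, here is a hedged sketch of how the two functions combine; it is not FFmpeg's actual caller (a real caller also has to track how many bytes ff_mediacodec_dec_send() reports as consumed and buffer the remainder), and the wrapper name mediacodec_sketch_receive_frame is made up for this example:

    static int mediacodec_sketch_receive_frame(AVCodecContext *avctx,
                                               MediaCodecDecContext *s,
                                               AVPacket *pkt, AVFrame *frame)
    {
        int ret;

        /* First try to drain an already decoded frame without blocking. */
        ret = ff_mediacodec_dec_receive(avctx, s, frame, false);
        if (ret != AVERROR(EAGAIN))
            return ret; /* 0 (got a frame), AVERROR_EOF, or a real error */

        /* Feed the pending packet; a 0-sized packet starts draining. */
        ret = ff_mediacodec_dec_send(avctx, s, pkt, true);
        if (ret < 0 && ret != AVERROR(EAGAIN))
            return ret;

        /* Block on the output side until a frame (or EOF) shows up. */
        return ff_mediacodec_dec_receive(avctx, s, frame, true);
    }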