mediacodecdec_common.c (40737B)
1 /* 2 * Android MediaCodec decoder 3 * 4 * Copyright (c) 2015-2016 Matthieu Bouron <matthieu.bouron stupeflix.com> 5 * 6 * This file is part of FFmpeg. 7 * 8 * FFmpeg is free software; you can redistribute it and/or 9 * modify it under the terms of the GNU Lesser General Public 10 * License as published by the Free Software Foundation; either 11 * version 2.1 of the License, or (at your option) any later version. 12 * 13 * FFmpeg is distributed in the hope that it will be useful, 14 * but WITHOUT ANY WARRANTY; without even the implied warranty of 15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 16 * Lesser General Public License for more details. 17 * 18 * You should have received a copy of the GNU Lesser General Public 19 * License along with FFmpeg; if not, write to the Free Software 20 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 21 */ 22 23 #include <string.h> 24 #include <sys/types.h> 25 26 #include "libavutil/avassert.h" 27 #include "libavutil/common.h" 28 #include "libavutil/hwcontext_mediacodec.h" 29 #include "libavutil/mem.h" 30 #include "libavutil/log.h" 31 #include "libavutil/pixfmt.h" 32 #include "libavutil/fftime.h" 33 #include "libavutil/timestamp.h" 34 #include "libavutil/channel_layout.h" 35 36 #include "avcodec.h" 37 #include "decode.h" 38 39 #include "mediacodec.h" 40 #include "mediacodec_surface.h" 41 #include "mediacodec_sw_buffer.h" 42 #include "mediacodec_wrapper.h" 43 #include "mediacodecdec_common.h" 44 45 /** 46 * OMX.k3.video.decoder.avc, OMX.NVIDIA.* OMX.SEC.avc.dec and OMX.google 47 * codec workarounds used in various place are taken from the Gstreamer 48 * project. 49 * 50 * Gstreamer references: 51 * https://cgit.freedesktop.org/gstreamer/gst-plugins-bad/tree/sys/androidmedia/ 52 * 53 * Gstreamer copyright notice: 54 * 55 * Copyright (C) 2012, Collabora Ltd. 
56 * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk> 57 * 58 * Copyright (C) 2012, Rafaël Carré <funman@videolanorg> 59 * 60 * Copyright (C) 2015, Sebastian Dröge <sebastian@centricular.com> 61 * 62 * Copyright (C) 2014-2015, Collabora Ltd. 63 * Author: Matthieu Bouron <matthieu.bouron@gcollabora.com> 64 * 65 * Copyright (C) 2015, Edward Hervey 66 * Author: Edward Hervey <bilboed@gmail.com> 67 * 68 * Copyright (C) 2015, Matthew Waters <matthew@centricular.com> 69 * 70 * This library is free software; you can redistribute it and/or 71 * modify it under the terms of the GNU Lesser General Public 72 * License as published by the Free Software Foundation 73 * version 2.1 of the License. 74 * 75 * This library is distributed in the hope that it will be useful, 76 * but WITHOUT ANY WARRANTY; without even the implied warranty of 77 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 78 * Lesser General Public License for more details. 79 * 80 * You should have received a copy of the GNU Lesser General Public 81 * License along with this library; if not, write to the Free Software 82 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 83 * 84 */ 85 86 #define INPUT_DEQUEUE_TIMEOUT_US 8000 87 #define OUTPUT_DEQUEUE_TIMEOUT_US 8000 88 #define OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US 8000 89 90 enum { 91 ENCODING_PCM_16BIT = 0x00000002, 92 ENCODING_PCM_8BIT = 0x00000003, 93 ENCODING_PCM_FLOAT = 0x00000004, 94 ENCODING_PCM_24BIT_PACKED = 0x00000015, 95 ENCODING_PCM_32BIT = 0x00000016, 96 }; 97 98 static const struct { 99 100 int pcm_format; 101 enum AVSampleFormat sample_format; 102 103 } sample_formats[] = { 104 105 { ENCODING_PCM_16BIT, AV_SAMPLE_FMT_S16 }, 106 { ENCODING_PCM_8BIT, AV_SAMPLE_FMT_U8 }, 107 { ENCODING_PCM_FLOAT, AV_SAMPLE_FMT_FLT }, 108 { ENCODING_PCM_32BIT, AV_SAMPLE_FMT_S32 }, 109 { 0 } 110 }; 111 112 static enum AVSampleFormat mcdec_map_pcm_format(AVCodecContext *avctx, 113 MediaCodecDecContext *s, 114 int 
pcm_format) 115 { 116 enum AVSampleFormat ret = AV_SAMPLE_FMT_NONE; 117 118 for (int i = 0; i < FF_ARRAY_ELEMS(sample_formats); i++) { 119 if (sample_formats[i].pcm_format == pcm_format) { 120 return sample_formats[i].sample_format; 121 } 122 } 123 124 av_log(avctx, AV_LOG_ERROR, "Output sample format 0x%x (value=%d) is not supported\n", 125 pcm_format, pcm_format); 126 127 return ret; 128 } 129 130 enum 131 { 132 CHANNEL_OUT_FRONT_LEFT = 0x4, 133 CHANNEL_OUT_FRONT_RIGHT = 0x8, 134 CHANNEL_OUT_FRONT_CENTER = 0x10, 135 CHANNEL_OUT_LOW_FREQUENCY = 0x20, 136 CHANNEL_OUT_BACK_LEFT = 0x40, 137 CHANNEL_OUT_BACK_RIGHT = 0x80, 138 CHANNEL_OUT_FRONT_LEFT_OF_CENTER = 0x100, 139 CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x200, 140 CHANNEL_OUT_BACK_CENTER = 0x400, 141 CHANNEL_OUT_SIDE_LEFT = 0x800, 142 CHANNEL_OUT_SIDE_RIGHT = 0x1000, 143 CHANNEL_OUT_TOP_CENTER = 0x2000, 144 CHANNEL_OUT_TOP_FRONT_LEFT = 0x4000, 145 CHANNEL_OUT_TOP_FRONT_CENTER = 0x8000, 146 CHANNEL_OUT_TOP_FRONT_RIGHT = 0x10000, 147 CHANNEL_OUT_TOP_BACK_LEFT = 0x20000, 148 CHANNEL_OUT_TOP_BACK_CENTER = 0x40000, 149 CHANNEL_OUT_TOP_BACK_RIGHT = 0x80000, 150 }; 151 152 static const struct { 153 154 int mask; 155 uint64_t layout; 156 157 } channel_masks[] = { 158 { CHANNEL_OUT_FRONT_LEFT, AV_CH_FRONT_LEFT }, 159 { CHANNEL_OUT_FRONT_RIGHT, AV_CH_FRONT_RIGHT }, 160 { CHANNEL_OUT_FRONT_CENTER, AV_CH_FRONT_CENTER }, 161 { CHANNEL_OUT_LOW_FREQUENCY, AV_CH_LOW_FREQUENCY }, 162 { CHANNEL_OUT_BACK_LEFT, AV_CH_BACK_LEFT }, 163 { CHANNEL_OUT_BACK_RIGHT, AV_CH_BACK_RIGHT }, 164 { CHANNEL_OUT_FRONT_LEFT_OF_CENTER, AV_CH_FRONT_LEFT_OF_CENTER }, 165 { CHANNEL_OUT_FRONT_RIGHT_OF_CENTER, AV_CH_FRONT_RIGHT_OF_CENTER }, 166 { CHANNEL_OUT_BACK_CENTER, AV_CH_BACK_CENTER }, 167 { CHANNEL_OUT_SIDE_LEFT, AV_CH_SIDE_LEFT }, 168 { CHANNEL_OUT_SIDE_RIGHT, AV_CH_SIDE_RIGHT }, 169 { CHANNEL_OUT_TOP_CENTER, AV_CH_TOP_CENTER }, 170 { CHANNEL_OUT_TOP_FRONT_LEFT, AV_CH_TOP_FRONT_LEFT }, 171 { CHANNEL_OUT_TOP_FRONT_CENTER, AV_CH_TOP_FRONT_CENTER }, 
172 { CHANNEL_OUT_TOP_FRONT_RIGHT, AV_CH_TOP_FRONT_RIGHT }, 173 { CHANNEL_OUT_TOP_BACK_LEFT, AV_CH_TOP_BACK_LEFT }, 174 { CHANNEL_OUT_TOP_BACK_CENTER, AV_CH_TOP_BACK_CENTER }, 175 { CHANNEL_OUT_TOP_BACK_RIGHT, AV_CH_TOP_BACK_RIGHT }, 176 }; 177 178 static uint64_t mcdec_map_channel_mask(AVCodecContext *avctx, 179 int channel_mask) 180 { 181 uint64_t channel_layout = 0; 182 183 for (int i = 0; i < FF_ARRAY_ELEMS(channel_masks); i++) { 184 if (channel_mask & channel_masks[i].mask) 185 channel_layout |= channel_masks[i].layout; 186 } 187 188 return channel_layout; 189 } 190 191 enum { 192 COLOR_FormatYUV420Planar = 0x13, 193 COLOR_FormatYUV420SemiPlanar = 0x15, 194 COLOR_FormatYCbYCr = 0x19, 195 COLOR_FormatAndroidOpaque = 0x7F000789, 196 COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00, 197 COLOR_QCOM_FormatYUV420SemiPlanar32m = 0x7fa30c04, 198 COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka = 0x7fa30c03, 199 COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100, 200 COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced = 0x7f000001, 201 }; 202 203 static const struct { 204 205 int color_format; 206 enum AVPixelFormat pix_fmt; 207 208 } color_formats[] = { 209 210 { COLOR_FormatYUV420Planar, AV_PIX_FMT_YUV420P }, 211 { COLOR_FormatYUV420SemiPlanar, AV_PIX_FMT_NV12 }, 212 { COLOR_QCOM_FormatYUV420SemiPlanar, AV_PIX_FMT_NV12 }, 213 { COLOR_QCOM_FormatYUV420SemiPlanar32m, AV_PIX_FMT_NV12 }, 214 { COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka, AV_PIX_FMT_NV12 }, 215 { COLOR_TI_FormatYUV420PackedSemiPlanar, AV_PIX_FMT_NV12 }, 216 { COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced, AV_PIX_FMT_NV12 }, 217 { 0 } 218 }; 219 220 static enum AVPixelFormat mcdec_map_color_format(AVCodecContext *avctx, 221 MediaCodecDecContext *s, 222 int color_format) 223 { 224 int i; 225 enum AVPixelFormat ret = AV_PIX_FMT_NONE; 226 227 if (s->surface) { 228 return AV_PIX_FMT_MEDIACODEC; 229 } 230 231 if (!strcmp(s->codec_name, "OMX.k3.video.decoder.avc") && color_format == 
COLOR_FormatYCbYCr) { 232 s->color_format = color_format = COLOR_TI_FormatYUV420PackedSemiPlanar; 233 } 234 235 for (i = 0; i < FF_ARRAY_ELEMS(color_formats); i++) { 236 if (color_formats[i].color_format == color_format) { 237 return color_formats[i].pix_fmt; 238 } 239 } 240 241 av_log(avctx, AV_LOG_ERROR, "Output color format 0x%x (value=%d) is not supported\n", 242 color_format, color_format); 243 244 return ret; 245 } 246 247 static void ff_mediacodec_dec_ref(MediaCodecDecContext *s) 248 { 249 atomic_fetch_add(&s->refcount, 1); 250 } 251 252 static void ff_mediacodec_dec_unref(MediaCodecDecContext *s) 253 { 254 if (!s) 255 return; 256 257 if (atomic_fetch_sub(&s->refcount, 1) == 1) { 258 if (s->codec) { 259 ff_AMediaCodec_delete(s->codec); 260 s->codec = NULL; 261 } 262 263 if (s->format) { 264 ff_AMediaFormat_delete(s->format); 265 s->format = NULL; 266 } 267 268 if (s->surface) { 269 ff_mediacodec_surface_unref(s->surface, NULL); 270 s->surface = NULL; 271 } 272 273 av_freep(&s->codec_name); 274 av_freep(&s); 275 } 276 } 277 278 static void mediacodec_buffer_release(void *opaque, uint8_t *data) 279 { 280 AVMediaCodecBuffer *buffer = opaque; 281 MediaCodecDecContext *ctx = buffer->ctx; 282 int released = atomic_load(&buffer->released); 283 284 if (!released && (ctx->delay_flush || buffer->serial == atomic_load(&ctx->serial))) { 285 atomic_fetch_sub(&ctx->hw_buffer_count, 1); 286 av_log(ctx->avctx, AV_LOG_DEBUG, 287 "Releasing output buffer %zd (%p) ts=%"PRId64" on free() [%d pending]\n", 288 buffer->index, buffer, buffer->pts, atomic_load(&ctx->hw_buffer_count)); 289 ff_AMediaCodec_releaseOutputBuffer(ctx->codec, buffer->index, 0); 290 } 291 292 ff_mediacodec_dec_unref(ctx); 293 av_freep(&buffer); 294 } 295 296 static int mediacodec_wrap_hw_buffer(AVCodecContext *avctx, 297 MediaCodecDecContext *s, 298 ssize_t index, 299 FFAMediaCodecBufferInfo *info, 300 AVFrame *frame) 301 { 302 int ret = 0; 303 int status = 0; 304 AVMediaCodecBuffer *buffer = NULL; 305 306 
frame->buf[0] = NULL; 307 frame->width = avctx->width; 308 frame->height = avctx->height; 309 frame->format = avctx->pix_fmt; 310 frame->sample_aspect_ratio = avctx->sample_aspect_ratio; 311 312 if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) { 313 frame->pts = av_rescale_q(info->presentationTimeUs, 314 AV_TIME_BASE_Q, 315 avctx->pkt_timebase); 316 } else { 317 frame->pts = info->presentationTimeUs; 318 } 319 frame->pkt_dts = AV_NOPTS_VALUE; 320 frame->color_range = avctx->color_range; 321 frame->color_primaries = avctx->color_primaries; 322 frame->color_trc = avctx->color_trc; 323 frame->colorspace = avctx->colorspace; 324 325 buffer = av_mallocz(sizeof(AVMediaCodecBuffer)); 326 if (!buffer) { 327 ret = AVERROR(ENOMEM); 328 goto fail; 329 } 330 331 atomic_init(&buffer->released, 0); 332 333 frame->buf[0] = av_buffer_create(NULL, 334 0, 335 mediacodec_buffer_release, 336 buffer, 337 AV_BUFFER_FLAG_READONLY); 338 339 if (!frame->buf[0]) { 340 ret = AVERROR(ENOMEM); 341 goto fail; 342 343 } 344 345 buffer->ctx = s; 346 buffer->serial = atomic_load(&s->serial); 347 ff_mediacodec_dec_ref(s); 348 349 buffer->index = index; 350 buffer->pts = info->presentationTimeUs; 351 352 frame->data[3] = (uint8_t *)buffer; 353 354 atomic_fetch_add(&s->hw_buffer_count, 1); 355 av_log(avctx, AV_LOG_DEBUG, 356 "Wrapping output buffer %zd (%p) ts=%"PRId64" [%d pending]\n", 357 buffer->index, buffer, buffer->pts, atomic_load(&s->hw_buffer_count)); 358 359 return 0; 360 fail: 361 av_freep(&buffer); 362 status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0); 363 if (status < 0) { 364 av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n"); 365 ret = AVERROR_EXTERNAL; 366 } 367 368 return ret; 369 } 370 371 static int mediacodec_wrap_sw_audio_buffer(AVCodecContext *avctx, 372 MediaCodecDecContext *s, 373 uint8_t *data, 374 size_t size, 375 ssize_t index, 376 FFAMediaCodecBufferInfo *info, 377 AVFrame *frame) 378 { 379 int ret = 0; 380 int status = 0; 381 const int 
sample_size = av_get_bytes_per_sample(avctx->sample_fmt); 382 if (!sample_size) { 383 av_log(avctx, AV_LOG_ERROR, "Could not get bytes per sample\n"); 384 ret = AVERROR(ENOSYS); 385 goto done; 386 } 387 388 frame->format = avctx->sample_fmt; 389 frame->sample_rate = avctx->sample_rate; 390 frame->nb_samples = info->size / (sample_size * avctx->ch_layout.nb_channels); 391 392 ret = av_channel_layout_copy(&frame->ch_layout, &avctx->ch_layout); 393 if (ret < 0) { 394 av_log(avctx, AV_LOG_ERROR, "Could not copy channel layout\n"); 395 goto done; 396 } 397 398 /* MediaCodec buffers needs to be copied to our own refcounted buffers 399 * because the flush command invalidates all input and output buffers. 400 */ 401 ret = ff_get_buffer(avctx, frame, 0); 402 if (ret < 0) { 403 av_log(avctx, AV_LOG_ERROR, "Could not allocate buffer\n"); 404 goto done; 405 } 406 407 /* Override frame->pts as ff_get_buffer will override its value based 408 * on the last avpacket received which is not in sync with the frame: 409 * * N avpackets can be pushed before 1 frame is actually returned 410 * * 0-sized avpackets are pushed to flush remaining frames at EOS */ 411 if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) { 412 frame->pts = av_rescale_q(info->presentationTimeUs, 413 AV_TIME_BASE_Q, 414 avctx->pkt_timebase); 415 } else { 416 frame->pts = info->presentationTimeUs; 417 } 418 frame->pkt_dts = AV_NOPTS_VALUE; 419 frame->flags |= AV_FRAME_FLAG_KEY; 420 421 av_log(avctx, AV_LOG_TRACE, 422 "Frame: format=%d channels=%d sample_rate=%d nb_samples=%d", 423 avctx->sample_fmt, avctx->ch_layout.nb_channels, avctx->sample_rate, frame->nb_samples); 424 425 memcpy(frame->data[0], data, info->size); 426 427 ret = 0; 428 done: 429 status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0); 430 if (status < 0) { 431 av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n"); 432 ret = AVERROR_EXTERNAL; 433 } 434 435 return ret; 436 } 437 438 static int 
mediacodec_wrap_sw_video_buffer(AVCodecContext *avctx, 439 MediaCodecDecContext *s, 440 uint8_t *data, 441 size_t size, 442 ssize_t index, 443 FFAMediaCodecBufferInfo *info, 444 AVFrame *frame) 445 { 446 int ret = 0; 447 int status = 0; 448 449 frame->width = avctx->width; 450 frame->height = avctx->height; 451 frame->format = avctx->pix_fmt; 452 453 /* MediaCodec buffers needs to be copied to our own refcounted buffers 454 * because the flush command invalidates all input and output buffers. 455 */ 456 if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) { 457 av_log(avctx, AV_LOG_ERROR, "Could not allocate buffer\n"); 458 goto done; 459 } 460 461 /* Override frame->pkt_pts as ff_get_buffer will override its value based 462 * on the last avpacket received which is not in sync with the frame: 463 * * N avpackets can be pushed before 1 frame is actually returned 464 * * 0-sized avpackets are pushed to flush remaining frames at EOS */ 465 if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) { 466 frame->pts = av_rescale_q(info->presentationTimeUs, 467 AV_TIME_BASE_Q, 468 avctx->pkt_timebase); 469 } else { 470 frame->pts = info->presentationTimeUs; 471 } 472 frame->pkt_dts = AV_NOPTS_VALUE; 473 474 av_log(avctx, AV_LOG_TRACE, 475 "Frame: width=%d stride=%d height=%d slice-height=%d " 476 "crop-top=%d crop-bottom=%d crop-left=%d crop-right=%d encoder=%s " 477 "destination linesizes=%d,%d,%d\n" , 478 avctx->width, s->stride, avctx->height, s->slice_height, 479 s->crop_top, s->crop_bottom, s->crop_left, s->crop_right, s->codec_name, 480 frame->linesize[0], frame->linesize[1], frame->linesize[2]); 481 482 switch (s->color_format) { 483 case COLOR_FormatYUV420Planar: 484 ff_mediacodec_sw_buffer_copy_yuv420_planar(avctx, s, data, size, info, frame); 485 break; 486 case COLOR_FormatYUV420SemiPlanar: 487 case COLOR_QCOM_FormatYUV420SemiPlanar: 488 case COLOR_QCOM_FormatYUV420SemiPlanar32m: 489 ff_mediacodec_sw_buffer_copy_yuv420_semi_planar(avctx, s, data, size, info, frame); 
490 break; 491 case COLOR_TI_FormatYUV420PackedSemiPlanar: 492 case COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced: 493 ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar(avctx, s, data, size, info, frame); 494 break; 495 case COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka: 496 ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka(avctx, s, data, size, info, frame); 497 break; 498 default: 499 av_log(avctx, AV_LOG_ERROR, "Unsupported color format 0x%x (value=%d)\n", 500 s->color_format, s->color_format); 501 ret = AVERROR(EINVAL); 502 goto done; 503 } 504 505 ret = 0; 506 done: 507 status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0); 508 if (status < 0) { 509 av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n"); 510 ret = AVERROR_EXTERNAL; 511 } 512 513 return ret; 514 } 515 516 static int mediacodec_wrap_sw_buffer(AVCodecContext *avctx, 517 MediaCodecDecContext *s, 518 uint8_t *data, 519 size_t size, 520 ssize_t index, 521 FFAMediaCodecBufferInfo *info, 522 AVFrame *frame) 523 { 524 if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) 525 return mediacodec_wrap_sw_audio_buffer(avctx, s, data, size, index, info, frame); 526 else if (avctx->codec_type == AVMEDIA_TYPE_VIDEO) 527 return mediacodec_wrap_sw_video_buffer(avctx, s, data, size, index, info, frame); 528 else 529 av_assert0(0); 530 } 531 532 #define AMEDIAFORMAT_GET_INT32(name, key, mandatory) do { \ 533 int32_t value = 0; \ 534 if (ff_AMediaFormat_getInt32(s->format, key, &value)) { \ 535 (name) = value; \ 536 } else if (mandatory) { \ 537 av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n", key, format); \ 538 ret = AVERROR_EXTERNAL; \ 539 goto fail; \ 540 } \ 541 } while (0) \ 542 543 static int mediacodec_dec_parse_video_format(AVCodecContext *avctx, MediaCodecDecContext *s) 544 { 545 int ret = 0; 546 int width = 0; 547 int height = 0; 548 int color_range = 0; 549 int color_standard = 0; 550 int color_transfer = 0; 551 char *format = NULL; 552 553 
if (!s->format) { 554 av_log(avctx, AV_LOG_ERROR, "Output MediaFormat is not set\n"); 555 return AVERROR(EINVAL); 556 } 557 558 format = ff_AMediaFormat_toString(s->format); 559 if (!format) { 560 return AVERROR_EXTERNAL; 561 } 562 av_log(avctx, AV_LOG_DEBUG, "Parsing MediaFormat %s\n", format); 563 564 /* Mandatory fields */ 565 AMEDIAFORMAT_GET_INT32(s->width, "width", 1); 566 AMEDIAFORMAT_GET_INT32(s->height, "height", 1); 567 568 AMEDIAFORMAT_GET_INT32(s->stride, "stride", 0); 569 s->stride = s->stride > 0 ? s->stride : s->width; 570 571 AMEDIAFORMAT_GET_INT32(s->slice_height, "slice-height", 0); 572 573 if (strstr(s->codec_name, "OMX.Nvidia.") && s->slice_height == 0) { 574 s->slice_height = FFALIGN(s->height, 16); 575 } else if (strstr(s->codec_name, "OMX.SEC.avc.dec")) { 576 s->slice_height = avctx->height; 577 s->stride = avctx->width; 578 } else if (s->slice_height == 0) { 579 s->slice_height = s->height; 580 } 581 582 AMEDIAFORMAT_GET_INT32(s->color_format, "color-format", 1); 583 avctx->pix_fmt = mcdec_map_color_format(avctx, s, s->color_format); 584 if (avctx->pix_fmt == AV_PIX_FMT_NONE) { 585 av_log(avctx, AV_LOG_ERROR, "Output color format is not supported\n"); 586 ret = AVERROR(EINVAL); 587 goto fail; 588 } 589 590 /* Optional fields */ 591 AMEDIAFORMAT_GET_INT32(s->crop_top, "crop-top", 0); 592 AMEDIAFORMAT_GET_INT32(s->crop_bottom, "crop-bottom", 0); 593 AMEDIAFORMAT_GET_INT32(s->crop_left, "crop-left", 0); 594 AMEDIAFORMAT_GET_INT32(s->crop_right, "crop-right", 0); 595 596 // Try "crop" for NDK 597 // MediaTek SOC return some default value like Rect(0, 0, 318, 238) 598 if (!(s->crop_right && s->crop_bottom) && s->use_ndk_codec && !strstr(s->codec_name, ".mtk.")) 599 ff_AMediaFormat_getRect(s->format, "crop", &s->crop_left, &s->crop_top, &s->crop_right, &s->crop_bottom); 600 601 if (s->crop_right && s->crop_bottom) { 602 width = s->crop_right + 1 - s->crop_left; 603 height = s->crop_bottom + 1 - s->crop_top; 604 } else { 605 /* TODO: NDK 
MediaFormat should try getRect() first. 606 * Try crop-width/crop-height, it works on NVIDIA Shield. 607 */ 608 AMEDIAFORMAT_GET_INT32(width, "crop-width", 0); 609 AMEDIAFORMAT_GET_INT32(height, "crop-height", 0); 610 } 611 if (!width || !height) { 612 width = s->width; 613 height = s->height; 614 } 615 616 AMEDIAFORMAT_GET_INT32(s->display_width, "display-width", 0); 617 AMEDIAFORMAT_GET_INT32(s->display_height, "display-height", 0); 618 619 if (s->display_width && s->display_height) { 620 AVRational sar = av_div_q( 621 (AVRational){ s->display_width, s->display_height }, 622 (AVRational){ width, height }); 623 ff_set_sar(avctx, sar); 624 } 625 626 AMEDIAFORMAT_GET_INT32(color_range, "color-range", 0); 627 if (color_range) 628 avctx->color_range = ff_AMediaFormatColorRange_to_AVColorRange(color_range); 629 630 AMEDIAFORMAT_GET_INT32(color_standard, "color-standard", 0); 631 if (color_standard) { 632 avctx->colorspace = ff_AMediaFormatColorStandard_to_AVColorSpace(color_standard); 633 avctx->color_primaries = ff_AMediaFormatColorStandard_to_AVColorPrimaries(color_standard); 634 } 635 636 AMEDIAFORMAT_GET_INT32(color_transfer, "color-transfer", 0); 637 if (color_transfer) 638 avctx->color_trc = ff_AMediaFormatColorTransfer_to_AVColorTransfer(color_transfer); 639 640 av_log(avctx, AV_LOG_INFO, 641 "Output crop parameters top=%d bottom=%d left=%d right=%d, " 642 "resulting dimensions width=%d height=%d\n", 643 s->crop_top, s->crop_bottom, s->crop_left, s->crop_right, 644 width, height); 645 646 av_freep(&format); 647 return ff_set_dimensions(avctx, width, height); 648 fail: 649 av_freep(&format); 650 return ret; 651 } 652 653 static int mediacodec_dec_parse_audio_format(AVCodecContext *avctx, MediaCodecDecContext *s) 654 { 655 int ret = 0; 656 int sample_rate = 0; 657 int channel_count = 0; 658 int channel_mask = 0; 659 int pcm_encoding = 0; 660 char *format = NULL; 661 662 if (!s->format) { 663 av_log(avctx, AV_LOG_ERROR, "Output MediaFormat is not set\n"); 664 
return AVERROR(EINVAL); 665 } 666 667 format = ff_AMediaFormat_toString(s->format); 668 if (!format) { 669 return AVERROR_EXTERNAL; 670 } 671 av_log(avctx, AV_LOG_DEBUG, "Parsing MediaFormat %s\n", format); 672 673 /* Mandatory fields */ 674 AMEDIAFORMAT_GET_INT32(channel_count, "channel-count", 1); 675 AMEDIAFORMAT_GET_INT32(sample_rate, "sample-rate", 1); 676 677 AMEDIAFORMAT_GET_INT32(pcm_encoding, "pcm-encoding", 0); 678 if (pcm_encoding) 679 avctx->sample_fmt = mcdec_map_pcm_format(avctx, s, pcm_encoding); 680 else 681 avctx->sample_fmt = AV_SAMPLE_FMT_S16; 682 683 avctx->sample_rate = sample_rate; 684 685 AMEDIAFORMAT_GET_INT32(channel_mask, "channel-mask", 0); 686 if (channel_mask) 687 av_channel_layout_from_mask(&avctx->ch_layout, mcdec_map_channel_mask(avctx, channel_mask)); 688 else 689 av_channel_layout_default(&avctx->ch_layout, channel_count); 690 691 av_log(avctx, AV_LOG_INFO, 692 "Output parameters channel-count=%d channel-layout=%x sample-rate=%d\n", 693 channel_count, channel_mask, sample_rate); 694 695 fail: 696 av_freep(&format); 697 return ret; 698 } 699 700 static int mediacodec_dec_parse_format(AVCodecContext *avctx, MediaCodecDecContext *s) 701 { 702 if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) 703 return mediacodec_dec_parse_audio_format(avctx, s); 704 else if (avctx->codec_type == AVMEDIA_TYPE_VIDEO) 705 return mediacodec_dec_parse_video_format(avctx, s); 706 else 707 av_assert0(0); 708 } 709 710 static int mediacodec_dec_flush_codec(AVCodecContext *avctx, MediaCodecDecContext *s) 711 { 712 FFAMediaCodec *codec = s->codec; 713 int status; 714 715 s->output_buffer_count = 0; 716 717 s->draining = 0; 718 s->flushing = 0; 719 s->eos = 0; 720 atomic_fetch_add(&s->serial, 1); 721 atomic_init(&s->hw_buffer_count, 0); 722 s->current_input_buffer = -1; 723 724 status = ff_AMediaCodec_flush(codec); 725 if (status < 0) { 726 av_log(avctx, AV_LOG_ERROR, "Failed to flush codec\n"); 727 return AVERROR_EXTERNAL; 728 } 729 730 return 0; 731 } 732 733 
static int mediacodec_dec_get_video_codec(AVCodecContext *avctx, MediaCodecDecContext *s, 734 const char *mime, FFAMediaFormat *format) 735 { 736 int profile; 737 738 enum AVPixelFormat pix_fmt; 739 static const enum AVPixelFormat pix_fmts[] = { 740 AV_PIX_FMT_MEDIACODEC, 741 AV_PIX_FMT_NONE, 742 }; 743 744 pix_fmt = ff_get_format(avctx, pix_fmts); 745 if (pix_fmt == AV_PIX_FMT_MEDIACODEC) { 746 AVMediaCodecContext *user_ctx = avctx->hwaccel_context; 747 748 if (avctx->hw_device_ctx) { 749 AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)(avctx->hw_device_ctx->data); 750 if (device_ctx->type == AV_HWDEVICE_TYPE_MEDIACODEC) { 751 if (device_ctx->hwctx) { 752 AVMediaCodecDeviceContext *mediacodec_ctx = (AVMediaCodecDeviceContext *)device_ctx->hwctx; 753 s->surface = ff_mediacodec_surface_ref(mediacodec_ctx->surface, mediacodec_ctx->native_window, avctx); 754 av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface); 755 } 756 } 757 } 758 759 if (!s->surface && user_ctx && user_ctx->surface) { 760 s->surface = ff_mediacodec_surface_ref(user_ctx->surface, NULL, avctx); 761 av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface); 762 } 763 } 764 765 profile = ff_AMediaCodecProfile_getProfileFromAVCodecContext(avctx); 766 if (profile < 0) { 767 av_log(avctx, AV_LOG_WARNING, "Unsupported or unknown profile\n"); 768 } 769 770 s->codec_name = ff_AMediaCodecList_getCodecNameByType(mime, profile, 0, avctx); 771 if (!s->codec_name) { 772 // getCodecNameByType() can fail due to missing JVM, while NDK 773 // mediacodec can be used without JVM. 
774 if (!s->use_ndk_codec) { 775 return AVERROR_EXTERNAL; 776 } 777 av_log(avctx, AV_LOG_INFO, "Failed to getCodecNameByType\n"); 778 } else { 779 av_log(avctx, AV_LOG_DEBUG, "Found decoder %s\n", s->codec_name); 780 } 781 782 if (s->codec_name) 783 s->codec = ff_AMediaCodec_createCodecByName(s->codec_name, s->use_ndk_codec); 784 else { 785 s->codec = ff_AMediaCodec_createDecoderByType(mime, s->use_ndk_codec); 786 if (s->codec) { 787 s->codec_name = ff_AMediaCodec_getName(s->codec); 788 if (!s->codec_name) 789 s->codec_name = av_strdup(mime); 790 } 791 } 792 if (!s->codec) { 793 av_log(avctx, AV_LOG_ERROR, "Failed to create media decoder for type %s and name %s\n", mime, s->codec_name); 794 return AVERROR_EXTERNAL; 795 } 796 797 return 0; 798 } 799 800 static int mediacodec_dec_get_audio_codec(AVCodecContext *avctx, MediaCodecDecContext *s, 801 const char *mime, FFAMediaFormat *format) 802 { 803 s->codec = ff_AMediaCodec_createDecoderByType(mime, s->use_ndk_codec); 804 if (!s->codec) { 805 av_log(avctx, AV_LOG_ERROR, "Failed to create media decoder for mime %s\n", mime); 806 return AVERROR_EXTERNAL; 807 } 808 809 s->codec_name = ff_AMediaCodec_getName(s->codec); 810 if (!s->codec_name) { 811 s->codec_name = av_strdup(mime); 812 if (!s->codec_name) 813 return AVERROR(ENOMEM); 814 } 815 816 return 0; 817 } 818 819 int ff_mediacodec_dec_init(AVCodecContext *avctx, MediaCodecDecContext *s, 820 const char *mime, FFAMediaFormat *format) 821 { 822 int ret; 823 int status; 824 825 s->avctx = avctx; 826 atomic_init(&s->refcount, 1); 827 atomic_init(&s->hw_buffer_count, 0); 828 atomic_init(&s->serial, 1); 829 s->current_input_buffer = -1; 830 831 if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) 832 ret = mediacodec_dec_get_audio_codec(avctx, s, mime, format); 833 else if (avctx->codec_type == AVMEDIA_TYPE_VIDEO) 834 ret = mediacodec_dec_get_video_codec(avctx, s, mime, format); 835 else 836 av_assert0(0); 837 if (ret < 0) 838 goto fail; 839 840 status = 
ff_AMediaCodec_configure(s->codec, format, s->surface, avctx->moz_ndk_crypto, 0); 841 if (status < 0) { 842 char *desc = ff_AMediaFormat_toString(format); 843 av_log(avctx, AV_LOG_ERROR, 844 "Failed to configure codec %s (status = %d) with format %s\n", 845 s->codec_name, status, desc); 846 av_freep(&desc); 847 848 ret = AVERROR_EXTERNAL; 849 goto fail; 850 } 851 852 status = ff_AMediaCodec_start(s->codec); 853 if (status < 0) { 854 char *desc = ff_AMediaFormat_toString(format); 855 av_log(avctx, AV_LOG_ERROR, 856 "Failed to start codec %s (status = %d) with format %s\n", 857 s->codec_name, status, desc); 858 av_freep(&desc); 859 ret = AVERROR_EXTERNAL; 860 goto fail; 861 } 862 863 if (avctx->codec_type == AVMEDIA_TYPE_VIDEO) { 864 s->format = ff_AMediaCodec_getOutputFormat(s->codec); 865 if (s->format) { 866 if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) { 867 av_log(avctx, AV_LOG_ERROR, 868 "Failed to configure context\n"); 869 goto fail; 870 } 871 } 872 } 873 874 av_log(avctx, AV_LOG_DEBUG, "MediaCodec %p started successfully\n", s->codec); 875 876 return 0; 877 878 fail: 879 av_log(avctx, AV_LOG_ERROR, "MediaCodec %p failed to start\n", s->codec); 880 ff_mediacodec_dec_close(avctx, s); 881 return ret; 882 } 883 884 int ff_mediacodec_dec_send(AVCodecContext *avctx, MediaCodecDecContext *s, 885 AVPacket *pkt, bool wait) 886 { 887 int offset = 0; 888 int need_draining = 0; 889 uint8_t *data; 890 size_t size; 891 FFAMediaCodec *codec = s->codec; 892 int status; 893 int64_t input_dequeue_timeout_us = wait ? 
INPUT_DEQUEUE_TIMEOUT_US : 0;
    int64_t pts;

    /* A deferred flush is pending: no new input can be queued until every
     * output buffer retained by the user has been released. */
    if (s->flushing) {
        av_log(avctx, AV_LOG_ERROR, "Decoder is flushing and cannot accept new buffer "
                                    "until all output buffers have been released\n");
        return AVERROR_EXTERNAL;
    }

    /* An empty packet is the caller's request to start draining (EOS). */
    if (pkt->size == 0) {
        need_draining = 1;
    }

    if (s->draining && s->eos) {
        return AVERROR_EOF;
    }

    /* Feed the packet to the codec, possibly split across several input
     * buffers when the packet is larger than one codec buffer. */
    while (offset < pkt->size || (need_draining && !s->draining)) {
        /* Reuse an input buffer dequeued on a previous call, if any. */
        ssize_t index = s->current_input_buffer;
        if (index < 0) {
            index = ff_AMediaCodec_dequeueInputBuffer(codec, input_dequeue_timeout_us);
            if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
                av_log(avctx, AV_LOG_TRACE, "No input buffer available, try again later\n");
                break;
            }

            if (index < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to dequeue input buffer (status=%zd)\n", index);
                return AVERROR_EXTERNAL;
            }
        }
        /* The buffer is consumed below (queued or partially filled), so it
         * is no longer cached for a later call. */
        s->current_input_buffer = -1;

        data = ff_AMediaCodec_getInputBuffer(codec, index, &size);
        if (!data) {
            av_log(avctx, AV_LOG_ERROR, "Failed to get input buffer\n");
            return AVERROR_EXTERNAL;
        }

        pts = pkt->pts;
        if (pts == AV_NOPTS_VALUE) {
            av_log(avctx, AV_LOG_WARNING, "Input packet is missing PTS\n");
            pts = 0;
        }
        /* Rescale to AV_TIME_BASE_Q (microseconds); a pts of 0 needs no
         * rescaling, and an unset pkt_timebase leaves the value as-is. */
        if (pts && avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
            pts = av_rescale_q(pts, avctx->pkt_timebase, AV_TIME_BASE_Q);
        }

        if (need_draining) {
            /* Queue an empty buffer carrying the EOS flag to start the
             * codec's drain sequence. */
            uint32_t flags = ff_AMediaCodec_getBufferFlagEndOfStream(codec);

            av_log(avctx, AV_LOG_DEBUG, "Sending End Of Stream signal\n");

            /* Mozilla extension: packets carrying NDK crypto info go
             * through the secure input path. */
            if (pkt->moz_ndk_crypto_info) {
                status = ff_AMediaCodec_queueSecureInputBuffer(codec, index, 0, pkt->moz_ndk_crypto_info, pts, flags);
            } else {
                status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, 0, pts, flags);
            }
            if (status < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to queue input empty buffer (status = %d)\n", status);
                return AVERROR_EXTERNAL;
            }

            av_log(avctx, AV_LOG_TRACE,
                   "Queued empty EOS input buffer %zd with flags=%d\n", index, flags);

            s->draining = 1;
            return 0;
        }

        /* Copy as much of the remaining packet data as fits in this
         * input buffer. */
        size = FFMIN(pkt->size - offset, size);
        memcpy(data, pkt->data + offset, size);
        offset += size;

        if (pkt->moz_ndk_crypto_info) {
            status = ff_AMediaCodec_queueSecureInputBuffer(codec, index, 0, pkt->moz_ndk_crypto_info, pts, 0);
        } else {
            status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, size, pts, 0);
        }
        if (status < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to queue input buffer (status = %d)\n", status);
            return AVERROR_EXTERNAL;
        }

        av_log(avctx, AV_LOG_TRACE,
               "Queued input buffer %zd size=%zd ts=%"PRIi64"\n", index, size, pts);
    }

    /* Return the number of bytes consumed, or EAGAIN when no input buffer
     * could be obtained and nothing was consumed. */
    if (offset == 0)
        return AVERROR(EAGAIN);
    return offset;
}

/*
 * Dequeue one decoded frame (or handle a codec event) from the MediaCodec
 * output queue. Returns 0 with a valid frame, AVERROR(EAGAIN) when no
 * frame is available yet, AVERROR_EOF once draining has completed, or a
 * negative error code on failure.
 */
int ff_mediacodec_dec_receive(AVCodecContext *avctx, MediaCodecDecContext *s,
                              AVFrame *frame, bool wait)
{
    int ret;
    uint8_t *data;
    ssize_t index;
    size_t size;
    FFAMediaCodec *codec = s->codec;
    FFAMediaCodecBufferInfo info = { 0 };
    int status;
    int64_t output_dequeue_timeout_us = OUTPUT_DEQUEUE_TIMEOUT_US;

    if (s->draining && s->eos) {
        return AVERROR_EOF;
    }

    if (s->draining) {
        /* If the codec is flushing or need to be flushed, block for a fair
         * amount of time to ensure we got a frame */
        output_dequeue_timeout_us = OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US;
    } else if (s->output_buffer_count == 0 || !wait) {
        /* If the codec hasn't produced any frames, do not block so we
         * can push data to it as fast as possible, and get the first
         * frame */
        output_dequeue_timeout_us = 0;
    }

    index = ff_AMediaCodec_dequeueOutputBuffer(codec, &info, output_dequeue_timeout_us);
    if (index >= 0) {
        av_log(avctx, AV_LOG_TRACE, "Got output buffer %zd"
                " offset=%" PRIi32 " size=%" PRIi32 " ts=%" PRIi64
                " flags=%" PRIu32 "\n", index, info.offset, info.size,
                info.presentationTimeUs, info.flags);

        /* The EOS flag on an output buffer marks the end of the drain
         * sequence started by ff_mediacodec_dec_send. */
        if (info.flags & ff_AMediaCodec_getBufferFlagEndOfStream(codec)) {
            s->eos = 1;
        }

        if (info.size) {
            if (s->surface) {
                /* Surface rendering: the frame wraps the buffer index and
                 * is rendered/released later. */
                if ((ret = mediacodec_wrap_hw_buffer(avctx, s, index, &info, frame)) < 0) {
                    av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
                    return ret;
                }
            } else {
                /* Software path: copy/wrap the raw output bytes. */
                data = ff_AMediaCodec_getOutputBuffer(codec, index, &size);
                if (!data) {
                    av_log(avctx, AV_LOG_ERROR, "Failed to get output buffer\n");
                    return AVERROR_EXTERNAL;
                }

                if ((ret = mediacodec_wrap_sw_buffer(avctx, s, data, size, index, &info, frame)) < 0) {
                    av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
                    return ret;
                }
            }

            s->output_buffer_count++;
            return 0;
        } else {
            /* Empty output buffer (e.g. pure EOS marker): give it back to
             * the codec without rendering. */
            status = ff_AMediaCodec_releaseOutputBuffer(codec, index, 0);
            if (status < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
            }
        }

    } else if (ff_AMediaCodec_infoOutputFormatChanged(codec, index)) {
        char *format = NULL;

        /* Replace the cached output format with the codec's new one. */
        if (s->format) {
            status = ff_AMediaFormat_delete(s->format);
            if (status < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to delete MediaFormat %p\n", s->format);
            }
        }

        s->format = ff_AMediaCodec_getOutputFormat(codec);
        if (!s->format) {
            av_log(avctx, AV_LOG_ERROR, "Failed to get output format\n");
            return AVERROR_EXTERNAL;
        }

        format = ff_AMediaFormat_toString(s->format);
        if (!format) {
            return AVERROR_EXTERNAL;
        }
        av_log(avctx, AV_LOG_INFO, "Output MediaFormat changed to %s\n", format);
        av_freep(&format);

        /* Propagate the new format to the AVCodecContext fields. */
        if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
            return ret;
        }

    } else if (ff_AMediaCodec_infoOutputBuffersChanged(codec, index)) {
        ff_AMediaCodec_cleanOutputBuffers(codec);
    } else if (ff_AMediaCodec_infoTryAgainLater(codec,
index)) {
        if (s->draining) {
            /* A dequeue timeout while draining means the codec is unlikely
             * to deliver the remaining frames before EOS. */
            av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer within %" PRIi64 "ms "
                                        "while draining remaining frames, output will probably lack frames\n",
                                        output_dequeue_timeout_us / 1000);
        } else {
            av_log(avctx, AV_LOG_TRACE, "No output buffer available, try again later\n");
        }
    } else {
        av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer (status=%zd)\n", index);
        return AVERROR_EXTERNAL;
    }

    /* No frame was produced on this call. */
    if (s->draining && s->eos)
        return AVERROR_EOF;
    return AVERROR(EAGAIN);
}

/*
 * ff_mediacodec_dec_flush returns 0 if the flush cannot be performed on
 * the codec (because the user retains frames). The codec stays in the
 * flushing state.
 *
 * ff_mediacodec_dec_flush returns 1 if the flush can actually be
 * performed on the codec. The codec leaves the flushing state and can
 * process again packets.
 *
 * ff_mediacodec_dec_flush returns a negative value if an error has
 * occurred.
1110 */ 1111 int ff_mediacodec_dec_flush(AVCodecContext *avctx, MediaCodecDecContext *s) 1112 { 1113 if (!s->surface || !s->delay_flush || atomic_load(&s->refcount) == 1) { 1114 int ret; 1115 1116 /* No frames (holding a reference to the codec) are retained by the 1117 * user, thus we can flush the codec and returns accordingly */ 1118 if ((ret = mediacodec_dec_flush_codec(avctx, s)) < 0) { 1119 return ret; 1120 } 1121 1122 return 1; 1123 } 1124 1125 s->flushing = 1; 1126 return 0; 1127 } 1128 1129 int ff_mediacodec_dec_close(AVCodecContext *avctx, MediaCodecDecContext *s) 1130 { 1131 if (!s) 1132 return 0; 1133 1134 if (s->codec) { 1135 if (atomic_load(&s->hw_buffer_count) == 0) { 1136 ff_AMediaCodec_stop(s->codec); 1137 av_log(avctx, AV_LOG_DEBUG, "MediaCodec %p stopped\n", s->codec); 1138 } else { 1139 av_log(avctx, AV_LOG_DEBUG, "Not stopping MediaCodec (there are buffers pending)\n"); 1140 } 1141 } 1142 1143 ff_mediacodec_dec_unref(s); 1144 1145 return 0; 1146 } 1147 1148 int ff_mediacodec_dec_is_flushing(AVCodecContext *avctx, MediaCodecDecContext *s) 1149 { 1150 return s->flushing; 1151 }