/*****************************************************************************
 * videotoolbox.m: Video Toolbox decoder
 *****************************************************************************
 * Copyright © 2014-2015 VideoLabs SAS
 *
 * Authors: Felix Paul Kühne <fkuehne # videolan.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation; either version 2.1 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301, USA.
 *****************************************************************************/

#pragma mark preamble

#ifdef HAVE_CONFIG_H
# import "config.h"
#endif

#import <vlc_common.h>
#import <vlc_plugin.h>
#import <vlc_codec.h>
#import "../packetizer/h264_nal.h"
#import "../video_chroma/copy.h"
#import <vlc_bits.h>

#import <VideoToolbox/VideoToolbox.h>

#import <Foundation/Foundation.h>
#import <TargetConditionals.h>

#import <sys/types.h>
#import <sys/sysctl.h>
#import <mach/machine.h>

#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif

#pragma mark - module descriptor

static int OpenDecoder(vlc_object_t *);
static void CloseDecoder(vlc_object_t *);

/* these VT specification keys are only declared in the 10.9+ SDKs; provide
 * them ourselves when building against an older SDK so the module still
 * compiles (the string values match the system-defined ones) */
#if MAC_OS_X_VERSION_MAX_ALLOWED < 1090
const CFStringRef kVTVideoDecoderSpecification_EnableHardwareAcceleratedVideoDecoder = CFSTR("EnableHardwareAcceleratedVideoDecoder");
const CFStringRef kVTVideoDecoderSpecification_RequireHardwareAcceleratedVideoDecoder = CFSTR("RequireHardwareAcceleratedVideoDecoder");
#endif

#define VT_ZERO_COPY N_("Use zero-copy rendering")
#if !TARGET_OS_IPHONE
#define VT_REQUIRE_HW_DEC N_("Use Hardware decoders only")
#endif
#define VT_TEMPO_DEINTERLACE N_("Deinterlacing")
#define VT_TEMPO_DEINTERLACE_LONG N_("If interlaced content is detected, temporal deinterlacing is enabled at the expense of a pipeline delay.")

vlc_module_begin()
set_category(CAT_INPUT)
set_subcategory(SUBCAT_INPUT_VCODEC)
set_description(N_("VideoToolbox video decoder"))
set_capability("decoder",800)
set_callbacks(OpenDecoder, CloseDecoder)
add_bool("videotoolbox-temporal-deinterlacing", true, VT_TEMPO_DEINTERLACE, VT_TEMPO_DEINTERLACE_LONG, false)
#if !TARGET_OS_IPHONE
add_bool("videotoolbox-zero-copy", false, VT_ZERO_COPY, VT_ZERO_COPY, false)
add_bool("videotoolbox-hw-decoder-only", false, VT_REQUIRE_HW_DEC, VT_REQUIRE_HW_DEC, false)
#else
/* on iOS, zero-copy rendering is the default as every device supports it */
add_bool("videotoolbox-zero-copy", true, VT_ZERO_COPY, VT_ZERO_COPY, false)
#endif
vlc_module_end()

#pragma mark - local prototypes

static CFDataRef ESDSCreate(decoder_t *, uint8_t *, uint32_t);
static picture_t *DecodeBlock(decoder_t *, block_t **);
static void DecoderCallback(void *, void *, OSStatus, VTDecodeInfoFlags,
                            CVPixelBufferRef, CMTime, CMTime);
void VTDictionarySetInt32(CFMutableDictionaryRef, CFStringRef, int);
static void copy420YpCbCr8Planar(picture_t *, CVPixelBufferRef buffer,
                                 unsigned i_width, unsigned i_height);
static BOOL deviceSupportsAdvancedProfiles();

/* per-picture private data: a retained CVPixelBufferRef handed to the
 * video output when zero-copy rendering is enabled */
struct picture_sys_t {
    CFTypeRef pixelBuffer;
};
#pragma mark - decoder structure

struct decoder_sys_t
{
    CMVideoCodecType            codec;          /* VT codec type picked by CodecPrecheck() */
    size_t                      codec_profile;  /* H264 profile parsed from the SPS */
    size_t                      codec_level;    /* H264 level parsed from the SPS */

    uint32_t                    i_nal_length_size; /* byte length of the NAL size prefix
                                                    * (from avcC, or 4 by default mid-stream) */

    bool                        b_started;      /* a decompression session is up and running */
    bool                        b_is_avcc;      /* extradata was a valid avcC atom (set when
                                                 * convert_sps_pps() succeeded) */
    VTDecompressionSessionRef   session;        /* the VT decompression session */
    CMVideoFormatDescriptionRef videoFormatDescription; /* format description fed to the session */

    /* NOTE(review): presumably used by the decode callback to reorder output
     * frames by timestamp — confirm against DecoderCallback/DecodeBlock */
    NSMutableArray              *outputTimeStamps;
    NSMutableDictionary         *outputFrames;

    bool                        b_zero_copy;    /* hand CVPixelBuffers to the vout directly
                                                 * instead of copying to I420 */
    bool                        b_enable_temporal_processing; /* temporal deinterlacer inserted */
};

#pragma mark - start & stop

/**
 * Check whether the incoming elementary stream is something VideoToolbox
 * can and should decode, and map the VLC codec to a CMVideoCodecType.
 *
 * \return the CMVideoCodecType to use, or -1 if the stream is unsupported.
 * For H264 without parsable profile/level info, kCMVideoCodecType_H264 is
 * returned optimistically and the check is redone once the SPS arrives.
 */
static CMVideoCodecType CodecPrecheck(decoder_t *p_dec)
{
    size_t i_profile = 0xFFFF, i_level = 0xFFFF;
    bool b_ret = false;
    /* FIX: initialize — the DV branch below tests codec before every one of
     * its sub-cases assigns it, which read an indeterminate value before */
    CMVideoCodecType codec = 0;

    /* check for the codec we can and want to decode */
    switch (p_dec->fmt_in.i_codec) {
        case VLC_CODEC_H264:
            codec = kCMVideoCodecType_H264;

            b_ret = h264_get_profile_level(&p_dec->fmt_in, &i_profile, &i_level, NULL);
            if (!b_ret) {
                msg_Warn(p_dec, "H264 profile and level parsing failed because it didn't arrive yet");
                return kCMVideoCodecType_H264;
            }

            msg_Dbg(p_dec, "trying to decode MPEG-4 Part 10: profile %zu, level %zu", i_profile, i_level);

            switch (i_profile) {
                case PROFILE_H264_BASELINE:
                case PROFILE_H264_MAIN:
                case PROFILE_H264_HIGH:
                    break;

                case PROFILE_H264_HIGH_10:
                {
                    if (deviceSupportsAdvancedProfiles())
                        break;
                }
                /* fall through: High-10 on a device without support is
                 * rejected like any other unsupported profile */

                default:
                {
                    msg_Dbg(p_dec, "unsupported H264 profile %zu", i_profile);
                    return -1;
                }
            }

#if !TARGET_OS_IPHONE
            /* a level higher than 5.2 was not tested, so don't dare to
             * try to decode it*/
            if (i_level > 52)
                return -1;
#else
            /* on SoC A8, 4.2 is the highest specified profile */
            if (i_level > 42)
                return -1;
#endif

            break;
        case VLC_CODEC_MP4V:
            codec = kCMVideoCodecType_MPEG4Video;
            break;
        case VLC_CODEC_H263:
            codec = kCMVideoCodecType_H263;
            break;

#if !TARGET_OS_IPHONE
        /* there are no DV or ProRes decoders on iOS, so bailout early */
        case VLC_CODEC_PRORES:
            /* the VT decoder can't differenciate between the ProRes flavors, so we do it */
            switch (p_dec->fmt_in.i_original_fourcc) {
                case VLC_FOURCC( 'a','p','4','c' ):
                case VLC_FOURCC( 'a','p','4','h' ):
                    codec = kCMVideoCodecType_AppleProRes4444;
                    break;

                case VLC_FOURCC( 'a','p','c','h' ):
                    codec = kCMVideoCodecType_AppleProRes422HQ;
                    break;

                case VLC_FOURCC( 'a','p','c','s' ):
                    codec = kCMVideoCodecType_AppleProRes422LT;
                    break;

                case VLC_FOURCC( 'a','p','c','o' ):
                    codec = kCMVideoCodecType_AppleProRes422Proxy;
                    break;

                default:
                    codec = kCMVideoCodecType_AppleProRes422;
                    break;
            }
            if (codec != 0)
                break;

        case VLC_CODEC_DV:
            /* the VT decoder can't differenciate between PAL and NTSC, so we need to do it */
            switch (p_dec->fmt_in.i_original_fourcc) {
                case VLC_FOURCC( 'd', 'v', 'c', ' '):
                case VLC_FOURCC( 'd', 'v', ' ', ' '):
                    msg_Dbg(p_dec, "Decoding DV NTSC");
                    codec = kCMVideoCodecType_DVCNTSC;
                    break;

                case VLC_FOURCC( 'd', 'v', 's', 'd'):
                case VLC_FOURCC( 'd', 'v', 'c', 'p'):
                case VLC_FOURCC( 'D', 'V', 'S', 'D'):
                    msg_Dbg(p_dec, "Decoding DV PAL");
                    codec = kCMVideoCodecType_DVCPAL;
                    break;

                default:
                    break;
            }
            if (codec != 0)
                break;
            /* unknown DV flavor: fall through to the rejection below */
#endif
            /* mpgv / mp2v needs fixing, so disable it for now */
#if 0
        case VLC_CODEC_MPGV:
            codec = kCMVideoCodecType_MPEG1Video;
            break;
        case VLC_CODEC_MP2V:
            codec = kCMVideoCodecType_MPEG2Video;
            break;
#endif

        default:
#ifndef NDEBUG
            msg_Err(p_dec, "'%4.4s' is not supported", (char *)&p_dec->fmt_in.i_codec);
#endif
            return -1;
    }

    return codec;
}

/**
 * Create and configure the VideoToolbox decompression session.
 *
 * For H264, the SPS/PPS are extracted either from the demuxer's avcC
 * extradata (p_block == NULL, regular open) or from a mid-stream data
 * block (late open / format change), and an avcC atom is rebuilt for VT.
 *
 * \param p_dec   the decoder instance
 * \param p_block mid-stream block to mine for SPS/PPS, or NULL on open
 * \return VLC_SUCCESS, VLC_ENOMEM or VLC_EGENERIC. A VLC_SUCCESS return
 *         with b_started still false means "waiting for SPS/PPS" (late start).
 */
static int StartVideoToolbox(decoder_t *p_dec, block_t *p_block)
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    OSStatus status;

    /* setup the decoder */
    CFMutableDictionaryRef decoderConfiguration = CFDictionaryCreateMutable(kCFAllocatorDefault,
                                                                            2,
                                                                            &kCFTypeDictionaryKeyCallBacks,
                                                                            &kCFTypeDictionaryValueCallBacks);
    CFDictionarySetValue(decoderConfiguration,
                         kCVImageBufferChromaLocationBottomFieldKey,
                         kCVImageBufferChromaLocation_Left);
    CFDictionarySetValue(decoderConfiguration,
                         kCVImageBufferChromaLocationTopFieldKey,
                         kCVImageBufferChromaLocation_Left);
    /* FIX: this is an add_bool option, read it with var_InheritBool */
    p_sys->b_zero_copy = var_InheritBool(p_dec, "videotoolbox-zero-copy");

    /* fetch extradata */
    CFMutableDictionaryRef extradata_info = NULL;
    CFDataRef extradata = NULL;

    extradata_info = CFDictionaryCreateMutable(kCFAllocatorDefault,
                                               1,
                                               &kCFTypeDictionaryKeyCallBacks,
                                               &kCFTypeDictionaryValueCallBacks);

    int i_video_width = 0;
    int i_video_height = 0;
    int i_sar_den = 0;
    int i_sar_num = 0;

    if (p_sys->codec == kCMVideoCodecType_H264) {
        /* Do a late opening if there is no extra data and no valid video size */
        if ((p_dec->fmt_in.video.i_width == 0 || p_dec->fmt_in.video.i_height == 0
          || p_dec->fmt_in.i_extra == 0) && p_block == NULL) {
            msg_Dbg(p_dec, "waiting for H264 SPS/PPS, will start late");

            /* FIX: don't leak the two Create'd dictionaries on this path */
            CFRelease(decoderConfiguration);
            CFRelease(extradata_info);
            return VLC_SUCCESS;
        }

        uint32_t size;
        void *p_buf, *p_alloc_buf = NULL;
        int i_ret = 0;

        if (p_block == NULL) {
            int buf_size = p_dec->fmt_in.i_extra + 20;
            size = p_dec->fmt_in.i_extra;

            p_alloc_buf = p_buf = malloc(buf_size);
            if (!p_buf) {
                msg_Warn(p_dec, "extra buffer allocation failed");
                CFRelease(decoderConfiguration);
                CFRelease(extradata_info);
                return VLC_ENOMEM;
            }

            /* we need to convert the SPS and PPS units we received from the
             * demuxer's avvC atom so we can process them further */
            i_ret = convert_sps_pps(p_dec,
                                    p_dec->fmt_in.p_extra,
                                    p_dec->fmt_in.i_extra,
                                    p_buf,
                                    buf_size,
                                    &size,
                                    &p_sys->i_nal_length_size);
            p_sys->b_is_avcc = i_ret == VLC_SUCCESS;
        } else {
            /* we are mid-stream, let's have the h264_get helper see if it
             * can find a NAL unit */
            size = p_block->i_buffer;
            p_buf = p_block->p_buffer;
            p_sys->i_nal_length_size = 4; /* default to 4 bytes */
            i_ret = VLC_SUCCESS;
        }

        if (i_ret != VLC_SUCCESS) {
            free(p_alloc_buf);
            CFRelease(decoderConfiguration);
            CFRelease(extradata_info);
            return VLC_EGENERIC;
        }

        uint8_t *p_sps_buf = NULL, *p_pps_buf = NULL;
        size_t i_sps_size = 0, i_pps_size = 0;
        if (!p_buf) {
            free(p_alloc_buf);
            CFRelease(decoderConfiguration);
            CFRelease(extradata_info);
            return VLC_EGENERIC;
        }

        /* get the SPS and PPS units from the NAL unit which is either
         * part of the demuxer's avvC atom or the mid stream data block */
        i_ret = h264_get_spspps(p_buf,
                                size,
                                &p_sps_buf,
                                &i_sps_size,
                                &p_pps_buf,
                                &i_pps_size);
        if (i_ret != VLC_SUCCESS) {
            msg_Warn(p_dec, "sps pps parsing failed");
            free(p_alloc_buf);
            CFRelease(decoderConfiguration);
            CFRelease(extradata_info);
            return VLC_EGENERIC;
        }

        struct nal_sps sps_data;
        i_ret = h264_parse_sps(p_sps_buf,
                               i_sps_size,
                               &sps_data);

        if (i_ret != VLC_SUCCESS) {
            free(p_alloc_buf);
            CFRelease(decoderConfiguration);
            CFRelease(extradata_info);
            return VLC_EGENERIC;
        }
        /* this data is more trust-worthy than what we receive
         * from the demuxer, so we will use it to over-write
         * the current values */
        i_video_width = sps_data.i_width;
        i_video_height = sps_data.i_height;
        i_sar_den = sps_data.vui.i_sar_den;
        i_sar_num = sps_data.vui.i_sar_num;

        /* no evaluation here as this is done in the precheck */
        p_sys->codec_profile = sps_data.i_profile;
        p_sys->codec_level = sps_data.i_level;

        /* create avvC atom to forward to the HW decoder
         * (renamed so it no longer shadows the p_block parameter) */
        block_t *p_avcC = h264_create_avcdec_config_record(
                                p_sys->i_nal_length_size,
                                &sps_data, p_sps_buf, i_sps_size,
                                p_pps_buf, i_pps_size);
        free(p_alloc_buf);
        if (!p_avcC) {
            CFRelease(decoderConfiguration);
            CFRelease(extradata_info);
            return VLC_EGENERIC;
        }

        extradata = CFDataCreate(kCFAllocatorDefault,
                                 p_avcC->p_buffer,
                                 p_avcC->i_buffer);
        block_Release(p_avcC);

        if (extradata)
            CFDictionarySetValue(extradata_info, CFSTR("avcC"), extradata);

        CFDictionarySetValue(decoderConfiguration,
                             kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms,
                             extradata_info);

    } else if (p_sys->codec == kCMVideoCodecType_MPEG4Video) {
        extradata = ESDSCreate(p_dec,
                               (uint8_t*)p_dec->fmt_in.p_extra,
                               p_dec->fmt_in.i_extra);

        if (extradata)
            CFDictionarySetValue(extradata_info, CFSTR("esds"), extradata);

        CFDictionarySetValue(decoderConfiguration,
                             kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms,
                             extradata_info);
    } else {
        CFDictionarySetValue(decoderConfiguration,
                             kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms,
                             extradata_info);
    }

    if (extradata)
        CFRelease(extradata);
    CFRelease(extradata_info);

    /* pixel aspect ratio */
    CFMutableDictionaryRef pixelaspectratio = CFDictionaryCreateMutable(kCFAllocatorDefault,
                                                                        2,
                                                                        &kCFTypeDictionaryKeyCallBacks,
                                                                        &kCFTypeDictionaryValueCallBacks);
    /* fallback on the demuxer if we don't have better info */
    if (i_video_width == 0)
        i_video_width = p_dec->fmt_in.video.i_width;
    if (i_video_height == 0)
        i_video_height = p_dec->fmt_in.video.i_height;
    if (i_sar_num == 0)
        i_sar_num = p_dec->fmt_in.video.i_sar_num ? p_dec->fmt_in.video.i_sar_num : 1;
    if (i_sar_den == 0)
        i_sar_den = p_dec->fmt_in.video.i_sar_den ? p_dec->fmt_in.video.i_sar_den : 1;

    VTDictionarySetInt32(pixelaspectratio,
                         kCVImageBufferPixelAspectRatioHorizontalSpacingKey,
                         i_sar_num);
    VTDictionarySetInt32(pixelaspectratio,
                         kCVImageBufferPixelAspectRatioVerticalSpacingKey,
                         i_sar_den);
    CFDictionarySetValue(decoderConfiguration,
                         kCVImageBufferPixelAspectRatioKey,
                         pixelaspectratio);
    CFRelease(pixelaspectratio);

#if !TARGET_OS_IPHONE
    /* enable HW accelerated playback, since this is optional on OS X
     * note that the backend may still fallback on software mode if no
     * suitable hardware is available */
    CFDictionarySetValue(decoderConfiguration,
                         kVTVideoDecoderSpecification_EnableHardwareAcceleratedVideoDecoder,
                         kCFBooleanTrue);

    /* on OS X, we can force VT to fail if no suitable HW decoder is available,
     * preventing the aforementioned SW fallback */
    /* FIX: add_bool option, read with var_InheritBool */
    if (var_InheritBool(p_dec, "videotoolbox-hw-decoder-only"))
        CFDictionarySetValue(decoderConfiguration,
                             kVTVideoDecoderSpecification_RequireHardwareAcceleratedVideoDecoder,
                             kCFBooleanTrue);
#endif

    p_sys->b_enable_temporal_processing = false;
    /* FIX: add_bool option, read with var_InheritBool */
    if (var_InheritBool(p_dec, "videotoolbox-temporal-deinterlacing")) {
        if (p_block != NULL) {
            if (p_block->i_flags & BLOCK_FLAG_TOP_FIELD_FIRST ||
                p_block->i_flags & BLOCK_FLAG_BOTTOM_FIELD_FIRST) {
                msg_Dbg(p_dec, "Interlaced content detected, inserting temporal deinterlacer");
                CFDictionarySetValue(decoderConfiguration, kVTDecompressionPropertyKey_FieldMode, kVTDecompressionProperty_FieldMode_DeinterlaceFields);
                CFDictionarySetValue(decoderConfiguration, kVTDecompressionPropertyKey_DeinterlaceMode, kVTDecompressionProperty_DeinterlaceMode_Temporal);
                p_sys->b_enable_temporal_processing = true;
            }
        }
    }

    /* create video format description */
    status = CMVideoFormatDescriptionCreate(kCFAllocatorDefault,
                                            p_sys->codec,
                                            i_video_width,
                                            i_video_height,
                                            decoderConfiguration,
                                            &p_sys->videoFormatDescription);
    if (status) {
        CFRelease(decoderConfiguration);
        msg_Err(p_dec, "video format description creation failed (%i)", status);
        return VLC_EGENERIC;
    }

    /* destination pixel buffer attributes */
    CFMutableDictionaryRef dpba = CFDictionaryCreateMutable(kCFAllocatorDefault,
                                                            2,
                                                            &kCFTypeDictionaryKeyCallBacks,
                                                            &kCFTypeDictionaryValueCallBacks);

#if !TARGET_OS_IPHONE
    CFDictionarySetValue(dpba,
                         kCVPixelBufferOpenGLCompatibilityKey,
                         kCFBooleanTrue);
#else
    CFDictionarySetValue(dpba,
                         kCVPixelBufferOpenGLESCompatibilityKey,
                         kCFBooleanTrue);
#endif

    /* full range allows a broader range of colors but is H264 only */
    if (p_sys->codec == kCMVideoCodecType_H264) {
        VTDictionarySetInt32(dpba,
                             kCVPixelBufferPixelFormatTypeKey,
                             kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
    } else {
        VTDictionarySetInt32(dpba,
                             kCVPixelBufferPixelFormatTypeKey,
                             kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
    }
    VTDictionarySetInt32(dpba,
                         kCVPixelBufferWidthKey,
                         i_video_width);
    VTDictionarySetInt32(dpba,
                         kCVPixelBufferHeightKey,
                         i_video_height);
    VTDictionarySetInt32(dpba,
                         kCVPixelBufferBytesPerRowAlignmentKey,
                         i_video_width * 2);

    /* setup decoder callback record */
    VTDecompressionOutputCallbackRecord decoderCallbackRecord;
    decoderCallbackRecord.decompressionOutputCallback = DecoderCallback;
    decoderCallbackRecord.decompressionOutputRefCon = p_dec;

    /* create decompression session */
    status = VTDecompressionSessionCreate(kCFAllocatorDefault,
                                          p_sys->videoFormatDescription,
                                          decoderConfiguration,
                                          dpba,
                                          &decoderCallbackRecord,
                                          &p_sys->session);

    /* release no longer needed storage items */
    CFRelease(dpba);
    CFRelease(decoderConfiguration);

    /* check if the session is valid */
    if (status) {

        switch (status) {
            case -12470:
                msg_Err(p_dec, "VT is not supported on this hardware");
                break;
            case -12471:
                msg_Err(p_dec, "Video format is not supported by VT");
                break;
            case -12903:
                msg_Err(p_dec, "created session is invalid, could not select and open decoder instance");
                break;
            case -12906:
                msg_Err(p_dec, "could not find decoder");
                break;
            case -12910:
                msg_Err(p_dec, "unsupported data");
                break;
            case -12913:
                msg_Err(p_dec, "VT is not available to sandboxed apps on this OS release");
                break;
            case -12917:
                msg_Err(p_dec, "Insufficient source color data");
                break;
            case -12918:
                msg_Err(p_dec, "Could not create color correction data");
                break;
            case -12210:
                msg_Err(p_dec, "Insufficient authorization to create decoder");
                break;
            case -8973:
                msg_Err(p_dec, "Could not select and open decoder instance");
                break;

            default:
                msg_Err(p_dec, "Decompression session creation failed (%i)", status);
                break;
        }
        return VLC_EGENERIC;
    }

    p_dec->fmt_out.video.i_width = i_video_width;
    p_dec->fmt_out.video.i_height = i_video_height;
    p_dec->fmt_out.video.i_sar_den = i_sar_den;
    p_dec->fmt_out.video.i_sar_num = i_sar_num;

    if (p_block) {
        /* this is a mid stream change so we need to tell the core about it */
        decoder_UpdateVideoFormat(p_dec);
    }

    /* setup storage */
    p_sys->outputTimeStamps = [[NSMutableArray alloc] init];
    p_sys->outputFrames = [[NSMutableDictionary alloc] init];
    /* FIX: check both allocations, not only outputFrames */
    if (!p_sys->outputTimeStamps || !p_sys->outputFrames)
        return VLC_ENOMEM;

    p_sys->b_started = YES;

    return VLC_SUCCESS;
}

/* Tear down the decompression session and drop the per-session storage.
 * Safe to call repeatedly; the video format description is released even
 * when no session was ever started. */
static void StopVideoToolbox(decoder_t *p_dec)
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    if (p_sys->b_started) {
        /* toll-free bridged release of the ObjC containers */
        if (p_sys->outputTimeStamps != nil)
            CFRelease(p_sys->outputTimeStamps);
        p_sys->outputTimeStamps = nil;

        if (p_sys->outputFrames != nil)
            CFRelease(p_sys->outputFrames);
        p_sys->outputFrames = nil;

        p_sys->b_started = false;

        if (p_sys->session != nil) {
            /* invalidate before releasing so pending callbacks are flushed */
            VTDecompressionSessionInvalidate(p_sys->session);
            CFRelease(p_sys->session);
            p_sys->session = nil;
        }
    }

    if (p_sys->videoFormatDescription != nil) {
        CFRelease(p_sys->videoFormatDescription);
        p_sys->videoFormatDescription = nil;
    }
}

#pragma mark - module open and close

/**
 * Probe the input stream and, if VideoToolbox can handle it, allocate the
 * decoder state and start a decompression session.
 *
 * \return VLC_SUCCESS, VLC_EGENERIC (unsupported stream / too-old OS) or
 *         whatever error StartVideoToolbox reported.
 */
static int OpenDecoder(vlc_object_t *p_this)
{
    decoder_t *p_dec = (decoder_t *)p_this;

#if TARGET_OS_IPHONE
    /* public VT API is only available starting with iOS 8 */
    if (unlikely([[UIDevice currentDevice].systemVersion floatValue] < 8.0)) {
        msg_Warn(p_dec, "decoder skipped as OS is too old");
        return VLC_EGENERIC;
    }
#endif

    if (p_dec->fmt_in.i_cat != VIDEO_ES)
        return VLC_EGENERIC;

    /* check quickly if we can digest the offered data */
    CMVideoCodecType codec;
    codec = CodecPrecheck(p_dec);
    if (codec == -1)
        return VLC_EGENERIC;

    /* now that we see a chance to decode anything, allocate the
     * internals and start the decoding session */
    /* FIX: calloc instead of malloc — on the failure path below,
     * CloseDecoder() reads fields (session, output storage) that
     * StartVideoToolbox may never have initialized */
    decoder_sys_t *p_sys;
    p_sys = calloc(1, sizeof(*p_sys));
    if (!p_sys)
        return VLC_ENOMEM;
    p_dec->p_sys = p_sys;
    p_sys->b_started = false;
    p_sys->b_is_avcc = false;
    p_sys->codec = codec;
    p_sys->videoFormatDescription = nil;

    int i_ret = StartVideoToolbox(p_dec, NULL);
    if (i_ret != VLC_SUCCESS) {
        CloseDecoder(p_this);
        return i_ret;
    }

    /* return our proper VLC internal state */
    p_dec->fmt_out.i_cat = p_dec->fmt_in.i_cat;
    p_dec->fmt_out.video = p_dec->fmt_in.video;
    p_dec->fmt_out.audio = p_dec->fmt_in.audio;
    if (p_sys->b_zero_copy) {
        msg_Dbg(p_dec, "zero-copy rendering pipeline enabled");
        p_dec->fmt_out.i_codec = VLC_CODEC_CVPX_OPAQUE;
    } else {
        msg_Dbg(p_dec, "copy rendering pipeline enabled");
        p_dec->fmt_out.i_codec = VLC_CODEC_I420;
    }

    p_dec->pf_decode_video = DecodeBlock;

    msg_Info(p_dec, "Using Video Toolbox to decode '%4.4s'", (char *)&p_dec->fmt_in.i_codec);

    return VLC_SUCCESS;
}

/**
 * Drain any in-flight frames, stop the session and free the decoder state.
 */
static void CloseDecoder(vlc_object_t *p_this)
{
    decoder_t *p_dec = (decoder_t *)p_this;
    decoder_sys_t *p_sys = p_dec->p_sys;

    /* FIX: test b_started first — when StartVideoToolbox bailed out early,
     * p_sys->session may never have been written, so it must not be read
     * before the b_started guard short-circuits */
    if (p_sys->b_started && p_sys->session) {
        VTDecompressionSessionWaitForAsynchronousFrames(p_sys->session);
    }
    StopVideoToolbox(p_dec);

    free(p_sys);
}

#pragma mark - helpers

/* Whether this device can decode advanced H264 profiles (High-10).
 * Only 64-bit iOS devices (A7 SoC and later) qualify; the simulator and
 * OS X always answer NO. */
static BOOL deviceSupportsAdvancedProfiles()
{
#if TARGET_IPHONE_SIMULATOR
    return NO;
#elif TARGET_OS_IPHONE
    cpu_type_t cpuType;
    size_t paramSize = sizeof(cpuType);

    sysctlbyname("hw.cputype", &cpuType, &paramSize, NULL, 0);

    /* Support for H264 profile HIGH 10 was introduced with the first 64bit Apple ARM SoC, the A7 */
    return (cpuType == CPU_TYPE_ARM64) ? YES : NO;
#else
    return NO;
#endif
}

/* Write an MP4 descriptor tag byte followed by its payload size encoded
 * on four bytes of 7 bits each, continuation bit set on all but the last. */
static inline void bo_add_mp4_tag_descr(bo_t *p_bo, uint8_t tag, uint32_t size)
{
    bo_add_8(p_bo, tag);
    bo_add_8(p_bo, (size >> 21) | 0x80);
    bo_add_8(p_bo, (size >> 14) | 0x80);
    bo_add_8(p_bo, (size >> 7)  | 0x80);
    bo_add_8(p_bo, size & 0x7F);
}

/**
 * Build an MPEG-4 "esds" (elementary stream descriptor) atom from the
 * decoder-specific extradata, for the VT format description.
 *
 * \param p_dec      decoder instance (unused, kept for signature stability)
 * \param p_buf      decoder-specific config (DecoderSpecificInfo payload)
 * \param i_buf_size size of p_buf in bytes
 * \return a new CFData holding the esds payload, or NULL on allocation failure
 */
static CFDataRef ESDSCreate(decoder_t *p_dec, uint8_t *p_buf, uint32_t i_buf_size)
{
    VLC_UNUSED(p_dec);

    int full_size = 3 + 5 +13 + 5 + i_buf_size + 3;
    int config_size = 13 + 5 + i_buf_size;

    bo_t bo;
    /* FIX: idiomatic boolean test (was `status != true`); also dropped the
     * unused `padding` local */
    if (!bo_init(&bo, 1024))
        return NULL;

    bo_add_8(&bo, 0);       // Version
    bo_add_24be(&bo, 0);    // Flags

    // elementary stream description tag
    bo_add_mp4_tag_descr(&bo, 0x03, full_size);
    bo_add_16be(&bo, 0);    // esid
    bo_add_8(&bo, 0);       // stream priority (0-3)

    // decoder configuration description tag
    bo_add_mp4_tag_descr(&bo, 0x04, config_size);
    bo_add_8(&bo, 32);      // object type identification (32 == MPEG4)
    bo_add_8(&bo, 0x11);    // stream type
    bo_add_24be(&bo, 0);    // buffer size
    bo_add_32be(&bo, 0);    // max bitrate
    bo_add_32be(&bo, 0);    // avg bitrate

    // decoder specific description tag
    bo_add_mp4_tag_descr(&bo, 0x05, i_buf_size);
    bo_add_mem(&bo, i_buf_size, p_buf);

    // sync layer configuration description tag
    bo_add_8(&bo, 0x06);    // tag
    bo_add_8(&bo, 0x01);    // length
    bo_add_8(&bo, 0x02);    // no SL

    CFDataRef data = CFDataCreate(kCFAllocatorDefault,
                                  bo.b->p_buffer,
                                  bo.b->i_buffer);
    bo_deinit(&bo);
    return data;
}

/* Inspect an H264 block for mid-stream SPS parameter changes and convert
 * it from Annex-B to AVCC (length-prefixed) framing for VideoToolbox.
 * On a detected parameter change the decoder session is stopped and the
 * block dropped (returns NULL so the caller can bail out); otherwise
 * returns the converted block. Takes ownership of p_block. */
static block_t *H264ProcessBlock(decoder_t *p_dec, block_t *p_block)
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    /* already length-prefixed: nothing to inspect or convert */
    if (p_sys->b_is_avcc)
        return p_block;

    uint8_t *p_sps_buf = NULL, *p_pps_buf = NULL;
    size_t i_sps_size = 0, i_pps_size = 0;

    if (h264_get_spspps(p_block->p_buffer, p_block->i_buffer,
                        &p_sps_buf, &i_sps_size,
                        &p_pps_buf, &i_pps_size) == VLC_SUCCESS) {
        struct nal_sps sps_data;

        if (h264_parse_sps(p_sps_buf, i_sps_size, &sps_data) == VLC_SUCCESS) {
            /* compare the freshly parsed SPS against what the session
             * was configured with; any mismatch requires a restart */
            bool b_restart_needed = false;

            if (p_sys->codec_profile != sps_data.i_profile) {
                msg_Warn(p_dec, "mid stream profile change found, restarting decoder");
                b_restart_needed = true;
            } else if (p_sys->codec_level != sps_data.i_level) {
                msg_Warn(p_dec, "mid stream level change found, restarting decoder");
                b_restart_needed = true;
            } else if (p_dec->fmt_out.video.i_width != sps_data.i_width) {
                msg_Warn(p_dec, "mid stream width change found, restarting decoder");
                b_restart_needed = true;
            } else if (p_dec->fmt_out.video.i_height != sps_data.i_height) {
                msg_Warn(p_dec, "mid stream height change found, restarting decoder");
                b_restart_needed = true;
            } else if (p_dec->fmt_out.video.i_sar_den != sps_data.vui.i_sar_den) {
                msg_Warn(p_dec, "mid stream SAR DEN change found, restarting decoder");
                b_restart_needed = true;
            } else if (p_dec->fmt_out.video.i_sar_num != sps_data.vui.i_sar_num) {
                msg_Warn(p_dec, "mid stream SAR NUM change found, restarting decoder");
                b_restart_needed = true;
            }

            if (b_restart_needed) {
                /* remember the new parameters for the restarted session */
                p_sys->codec_profile = sps_data.i_profile;
                p_sys->codec_level = sps_data.i_level;
                StopVideoToolbox(p_dec);
                block_Release(p_block);
                return NULL;
            }
        }
    }

    return convert_annexb_to_h264(p_block, p_sys->i_nal_length_size);
}

/* Wrap an already AVCC-framed frame into a CMSampleBuffer carrying the
 * VLC timestamps. The payload is referenced, not copied (kCFAllocatorNull
 * block allocator), so `buffer` must stay valid while the sample buffer
 * is in use. Returns NULL on failure; the caller owns the result. */
static CMSampleBufferRef VTSampleBufferCreate(decoder_t *p_dec,
                                              CMFormatDescriptionRef fmt_desc,
                                              void *buffer,
                                              size_t size,
                                              mtime_t i_pts,
                                              mtime_t i_dts,
                                              mtime_t i_length)
{
    CMBlockBufferRef  block_buf = NULL;
    CMSampleBufferRef sample_buf = NULL;

    /* prefer the PTS, falling back to the DTS when no PTS is set */
    CMSampleTimingInfo timeInfoArray[1] = { {
        .duration              = CMTimeMake(i_length, 1),
        .presentationTimeStamp = CMTimeMake(i_pts > 0 ? i_pts : i_dts, CLOCK_FREQ),
        .decodeTimeStamp       = CMTimeMake(i_dts, CLOCK_FREQ),
    } };

    OSStatus status =
        CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault,// structureAllocator
                                           buffer,             // memoryBlock
                                           size,               // blockLength
                                           kCFAllocatorNull,   // blockAllocator
                                           NULL,               // customBlockSource
                                           0,                  // offsetToData
                                           size,               // dataLength
                                           false,              // flags
                                           &block_buf);
    if (status != 0) {
        msg_Warn(p_dec, "cm block buffer creation failure %i", status);
    } else {
        status = CMSampleBufferCreate(kCFAllocatorDefault,  // allocator
                                      block_buf,            // dataBuffer
                                      TRUE,                 // dataReady
                                      0,                    // makeDataReadyCallback
                                      0,                    // makeDataReadyRefcon
                                      fmt_desc,             // formatDescription
                                      1,                    // numSamples
                                      1,                    // numSampleTimingEntries
                                      timeInfoArray,        // sampleTimingArray
                                      0,                    // numSampleSizeEntries
                                      NULL,                 // sampleSizeArray
                                      &sample_buf);
        if (status != noErr)
            msg_Warn(p_dec, "sample buffer creation failure %i", status);
    }

    /* the sample buffer keeps its own reference to the block buffer */
    if (block_buf != nil)
        CFRelease(block_buf);
    block_buf = nil;

    return sample_buf;
}

/* Store an int under `key` as a CFNumber; the dictionary retains the
 * number, so the temporary is released before returning. */
void VTDictionarySetInt32(CFMutableDictionaryRef dict, CFStringRef key, int value)
{
    CFNumberRef number = CFNumberCreate(NULL, kCFNumberSInt32Type, &value);
    CFDictionarySetValue(dict, key, number);
    CFRelease(number);
}

/* Copy a bi-planar NV12 CVPixelBuffer into an I420 picture, locking the
 * buffer's base address for the duration of the copy. No-op when the
 * buffer is NULL. */
static void copy420YpCbCr8Planar(picture_t *p_pic,
                                 CVPixelBufferRef buffer,
                                 unsigned i_width,
                                 unsigned i_height)
{
    uint8_t *pp_plane[2];
    size_t pi_pitch[2];

    if (buffer == NULL)
        return;

    CVPixelBufferLockBaseAddress(buffer, 0);

    /* plane 0: luma, plane 1: interleaved chroma */
    for (int i_plane = 0; i_plane < 2; i_plane++) {
        pp_plane[i_plane] = CVPixelBufferGetBaseAddressOfPlane(buffer, i_plane);
        pi_pitch[i_plane] = CVPixelBufferGetBytesPerRowOfPlane(buffer, i_plane);
    }

    CopyFromNv12ToI420(p_pic, pp_plane, pi_pitch, i_width, i_height);

    CVPixelBufferUnlockBaseAddress(buffer, 0);
}

#pragma mark - actual decoding

/* Decoder entry point: feed one block to the VideoToolbox session
 * (starting the session lazily on the first usable H264 block) and, once
 * more than five decoded frames are queued for reordering, return the
 * picture with the smallest timestamp. Returns NULL when no picture is
 * ready. Always consumes *pp_block (sets it to NULL). */
static picture_t *DecodeBlock(decoder_t *p_dec, block_t **pp_block)
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    block_t *p_block;
    VTDecodeFrameFlags decoderFlags = 0;
    VTDecodeInfoFlags flagOut;
    OSStatus status;
    int i_ret = 0;

    if (!pp_block)
        return NULL;

    p_block = *pp_block;

    if (likely(p_block != NULL)) {
        if (unlikely(p_block->i_flags&(BLOCK_FLAG_DISCONTINUITY|BLOCK_FLAG_CORRUPTED))) {
            /* on a discontinuity the queued timestamps/frames are no
             * longer meaningful: flush both reordering queues */
            if (likely(p_sys->b_started)) {
                @synchronized(p_sys->outputTimeStamps) {
                    [p_sys->outputTimeStamps removeAllObjects];
                }
                @synchronized(p_sys->outputFrames) {
                    [p_sys->outputFrames removeAllObjects];
                }
            }
            block_Release(p_block);
            goto skip;
        }

        /* feed to vt */
        if (likely(p_block->i_buffer)) {
            if (!p_sys->b_started) {
                /* decoding didn't start yet, which is ok for H264, let's see
                 * if we can use this block to get going */
                p_sys->codec = kCMVideoCodecType_H264;
                i_ret = StartVideoToolbox(p_dec, p_block);
            }

            if (i_ret != VLC_SUCCESS || !p_sys->b_started) {
                /* the session is unusable; we own the block and the
                 * caller won't see it again, so release it here rather
                 * than leaking it */
                block_Release(p_block);
                *pp_block = NULL;
                return NULL;
            }

            if (p_sys->codec == kCMVideoCodecType_H264) {
                /* may consume the block (and stop the session) on a
                 * mid-stream parameter change */
                p_block = H264ProcessBlock(p_dec, p_block);
                if (!p_block)
                {
                    *pp_block = NULL;
                    return NULL;
                }
            }

            CMSampleBufferRef sampleBuffer;
            sampleBuffer = VTSampleBufferCreate(p_dec,
                                                p_sys->videoFormatDescription,
                                                p_block->p_buffer,
                                                p_block->i_buffer,
                                                p_block->i_pts,
                                                p_block->i_dts,
                                                p_block->i_length);
            if (likely(sampleBuffer)) {
                if (likely(!p_sys->b_enable_temporal_processing))
                    decoderFlags = kVTDecodeFrame_EnableAsynchronousDecompression;
                else
                    decoderFlags = kVTDecodeFrame_EnableAsynchronousDecompression | kVTDecodeFrame_EnableTemporalProcessing;

                status = VTDecompressionSessionDecodeFrame(p_sys->session,
                                                           sampleBuffer,
                                                           decoderFlags,
                                                           NULL, // sourceFrameRefCon
                                                           &flagOut); // infoFlagsOut
                if (status != noErr) {
                    if (status == kCVReturnInvalidSize)
                        msg_Err(p_dec, "decoder failure: invalid block size");
                    else if (status == -666)
                        msg_Err(p_dec, "decoder failure: invalid SPS/PPS");
                    else if (status == -6661) {
                        msg_Err(p_dec, "decoder failure: invalid argument");
                        p_dec->b_error = true;
                    } else if (status == -8969 || status == -12909) {
                        /* unrecoverable stream error: tear the session
                         * down so it can restart on the next block */
                        msg_Err(p_dec, "decoder failure: bad data");
                        StopVideoToolbox(p_dec);
                        if (likely(sampleBuffer != nil))
                            CFRelease(sampleBuffer);
                        sampleBuffer = nil;
                        block_Release(p_block);
                        *pp_block = NULL;
                        return NULL;
                    } else if (status == -12911 || status == -8960) {
                        msg_Err(p_dec, "decoder failure: internal malfunction");
                        p_dec->b_error = true;
                    } else
                        msg_Dbg(p_dec, "decoding frame failed (%i)", status);
                }

                if (likely(sampleBuffer != nil))
                    CFRelease(sampleBuffer);
                sampleBuffer = nil;
            }
        }

        block_Release(p_block);
    }

skip:

    *pp_block = NULL;

    if (unlikely(!p_sys->b_started))
        return NULL;

    NSUInteger outputFramesCount = [p_sys->outputFrames count];

    /* keep a small reordering window before emitting any picture */
    if (outputFramesCount > 5) {
        CVPixelBufferRef imageBuffer = NULL;
        id imageBufferObject = nil;
        picture_t *p_pic = NULL;

        /* sort pending timestamps and pop the smallest one */
        NSString *timeStamp;
        @synchronized(p_sys->outputTimeStamps) {
            [p_sys->outputTimeStamps sortUsingComparator:^(id obj1, id obj2) {
                if ([obj1 longLongValue] > [obj2 longLongValue]) {
                    return (NSComparisonResult)NSOrderedDescending;
                }
                if ([obj1 longLongValue] < [obj2 longLongValue]) {
                    return (NSComparisonResult)NSOrderedAscending;
                }
                return (NSComparisonResult)NSOrderedSame;
            }];
            NSMutableArray *timeStamps = p_sys->outputTimeStamps;
            timeStamp = [timeStamps firstObject];
            if (timeStamps.count > 0) {
                [timeStamps removeObjectAtIndex:0];
            }
        }

        @synchronized(p_sys->outputFrames) {
            imageBufferObject = [p_sys->outputFrames objectForKey:timeStamp];
        }
        imageBuffer = (__bridge CVPixelBufferRef)imageBufferObject;

        if (imageBuffer != NULL) {
            if (CVPixelBufferGetDataSize(imageBuffer) > 0) {
                p_pic = decoder_NewPicture(p_dec);

                if (!p_pic)
                    return NULL;

                if (!p_sys->b_zero_copy) {
                    /* ehm, *cough*, memcpy.. */
                    copy420YpCbCr8Planar(p_pic,
                                         imageBuffer,
                                         CVPixelBufferGetWidthOfPlane(imageBuffer, 0),
                                         CVPixelBufferGetHeightOfPlane(imageBuffer, 0));
                } else {
                    /* the structure is allocated by the vout's pool */
                    if (p_pic->p_sys) {
                        /* if we received a recycled picture from the pool
                         * we need release the previous reference first,
                         * otherwise we would leak it */
                        if (p_pic->p_sys->pixelBuffer != nil) {
                            CFRelease(p_pic->p_sys->pixelBuffer);
                            p_pic->p_sys->pixelBuffer = nil;
                        }

                        /* hand a retained reference to the vout */
                        p_pic->p_sys->pixelBuffer = CFBridgingRetain(imageBufferObject);
                    }
                    /* will be freed by the vout */
                }

                p_pic->date = timeStamp.longLongValue;

                if (imageBufferObject) {
                    @synchronized(p_sys->outputFrames) {
                        [p_sys->outputFrames removeObjectForKey:timeStamp];
                    }
                }
            }
        }
        return p_pic;
    }

    return NULL;
}

/* Asynchronous VT decompression output callback: queues the decoded
 * pixel buffer, keyed and ordered by its PTS, for DecodeBlock to pick up.
 *
 * NOTE: per the VTDecompressionOutputCallback contract the callback does
 * NOT own imageBuffer — it must retain the buffer to keep it past the
 * callback's scope. Storing it in the NSDictionary below performs that
 * retain; the matching release happens when DecodeBlock removes it (or
 * hands it to the vout via CFBridgingRetain). Drop paths therefore must
 * NOT CFRelease a buffer they never retained. */
static void DecoderCallback(void *decompressionOutputRefCon,
                            void *sourceFrameRefCon,
                            OSStatus status,
                            VTDecodeInfoFlags infoFlags,
                            CVPixelBufferRef imageBuffer,
                            CMTime pts,
                            CMTime duration)
{
    VLC_UNUSED(sourceFrameRefCon);
    VLC_UNUSED(duration);
    decoder_t *p_dec = (decoder_t *)decompressionOutputRefCon;
    decoder_sys_t *p_sys = p_dec->p_sys;

#ifndef NDEBUG
    static BOOL outputdone = NO;
    if (!outputdone) {
        /* attachments include all kind of debug info */
        CFDictionaryRef attachments = CVBufferGetAttachments(imageBuffer,
                                                             kCVAttachmentMode_ShouldPropagate);
        NSLog(@"%@", attachments);
        outputdone = YES;
    }
#endif

    if (status != noErr) {
        msg_Warn(p_dec, "decoding of a frame failed (%i, %u)", status, (unsigned int) infoFlags);
        return;
    }

    if (imageBuffer == nil)
        return;

    if (infoFlags & kVTDecodeInfo_FrameDropped) {
        /* the session owns imageBuffer; releasing a reference we never
         * took would over-release it, so just drop out */
        msg_Dbg(p_dec, "decoder dropped frame");
        return;
    }

    NSString *timeStamp = nil;

    if (CMTIME_IS_VALID(pts))
        timeStamp = [[NSNumber numberWithLongLong:pts.value] stringValue];
    else {
        /* no usable timestamp to key the frame by: drop it (again,
         * without releasing the session-owned buffer) */
        msg_Dbg(p_dec, "invalid timestamp, dropping frame");
        return;
    }

    if (timeStamp) {
        id imageBufferObject = (__bridge id)imageBuffer;
        @synchronized(p_sys->outputTimeStamps) {
            [p_sys->outputTimeStamps addObject:timeStamp];
        }
        @synchronized(p_sys->outputFrames) {
            /* the dictionary retains the pixel buffer for us */
            [p_sys->outputFrames setObject:imageBufferObject forKey:timeStamp];
        }
    }
}