Diff
Modified: trunk/Source/ThirdParty/libwebrtc/ChangeLog (265072 => 265073)
--- trunk/Source/ThirdParty/libwebrtc/ChangeLog 2020-07-30 01:00:22 UTC (rev 265072)
+++ trunk/Source/ThirdParty/libwebrtc/ChangeLog 2020-07-30 01:01:04 UTC (rev 265073)
@@ -1,3 +1,27 @@
+2020-07-29 Jer Noble <jer.no...@apple.com>
+
+ Support HDR decode in SW VP9
+ https://bugs.webkit.org/show_bug.cgi?id=214928
+
+ Reviewed by Eric Carlson.
+
+ Support converting I010 buffers (full-planar, 10-bit data packed into the LSB of a 16-bit int)
+ into CVPixelBuffers (bi-planar, 10-bit data packed into the MSB of a 16-bit int). This requires
+ using functions from libyuv to merge and scale 16-bit planar data, optimized for AVX2. To know
+ that incoming buffers are 10-bit, and whether they're full-range, parse the 'vpcC' atom attached
+ to the CMFormatDescription.
+
+ * Source/webrtc/sdk/WebKit/WebKitUtilities.mm:
+ (webrtc::MergeUVPlane_16):
+ (webrtc::CopyPlane_16):
+ (webrtc::CopyVideoFrameToPixelBuffer):
+ (webrtc::pixelBufferFromFrame):
+ * Source/webrtc/sdk/WebKit/WebKitVP9Decoder.cpp:
+ (webrtc::createWebKitVP9Decoder):
+ (webrtc::startVP9DecoderSession):
+ (webrtc::WebKitVP9DecoderReceiver::createPixelBufferPoolForFormatDescription):
+ (webrtc::WebKitVP9DecoderReceiver::Decoded):
+
2020-07-28 Youenn Fablet <you...@apple.com>
Disable low latency code path for H264 constrained baseline
Modified: trunk/Source/ThirdParty/libwebrtc/Source/webrtc/sdk/WebKit/WebKitUtilities.mm (265072 => 265073)
--- trunk/Source/ThirdParty/libwebrtc/Source/webrtc/sdk/WebKit/WebKitUtilities.mm 2020-07-30 01:00:22 UTC (rev 265072)
+++ trunk/Source/ThirdParty/libwebrtc/Source/webrtc/sdk/WebKit/WebKitUtilities.mm 2020-07-30 01:01:04 UTC (rev 265073)
@@ -27,6 +27,8 @@
#include "native/src/objc_frame_buffer.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "libyuv/cpu_id.h"
+#include "libyuv/row.h"
#include "Framework/Headers/WebRTC/RTCVideoFrameBuffer.h"
namespace webrtc {
@@ -77,14 +79,135 @@
return true;
}
+// MergeUVPlane_16 merges two separate U and V planes into a single, interleaved
+// plane, while simultaneously scaling the output. Use '64' in the scale field to
+// shift 10-bit data from the LSB of the 16-bit int to the MSB.
+// NOTE: This method is based on libyuv::MergeUVPlane. If libyuv ever supports
+// this operation directly, we should replace the below with it.
+// NOTE: libyuv only has an optimization of MergeUVRow_16 for AVX2 intrinsics.
+// Calling this method on a CPU without AVX2 will fall back to a standard C
+// implementation, and will probably be super slow. Add new MergeUVRow_16
+// implementations as they become available in libyuv.
+void MergeUVPlane_16(const uint16_t* src_u, int src_stride_u, const uint16_t* src_v, int src_stride_v, uint16_t* dst_uv, int dst_stride_uv, int width, int height, int scale) {
+ void (*MergeUVRow_16)(const uint16_t* src_u, const uint16_t* src_v, uint16_t* dst_uv, int scale, int width) = libyuv::MergeUVRow_16_C;
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst_uv = dst_uv + (height - 1) * dst_stride_uv;
+ dst_stride_uv = -dst_stride_uv;
+ }
+ // Coalesce rows.
+ if (src_stride_u == width && src_stride_v == width && dst_stride_uv == width * 2) {
+ width *= height;
+ height = 1;
+ src_stride_u = src_stride_v = dst_stride_uv = 0;
+ }
+#if defined(HAS_MERGEUVROW_16_AVX2)
+ if (libyuv::TestCpuFlag(libyuv::kCpuHasAVX2))
+ MergeUVRow_16 = libyuv::MergeUVRow_16_AVX2;
+#endif
+
+ for (int y = 0; y < height; ++y) {
+ // Merge a row of U and V into a row of UV.
+ MergeUVRow_16(src_u, src_v, dst_uv, scale, width);
+ src_u += src_stride_u / sizeof(uint16_t);
+ src_v += src_stride_v / sizeof(uint16_t);
+ dst_uv += dst_stride_uv / sizeof(uint16_t);
+ }
+}
+
+// CopyPlane_16 will copy a plane of 16-bit data from one location to another,
+// while simultaneously scaling the output. Use '64' in the scale field to
+// shift 10-bit data from the LSB of a 16-bit int to the MSB.
+// NOTE: This method is based on MergeUVPlane_16 above, but operates on a
+// single plane, rather than interleaving two planes. If libyuv ever supports
+// this operation directly, we should replace the below with it.
+// NOTE: libyuv only has an optimization of MultiplyRow_16 for AVX2 intrinsics.
+// Calling this method on a CPU without AVX2 will fall back to a standard C
+// implementation, and will probably be super slow. Add new MultiplyRow_16
+// implementations as they become available in libyuv.
+void CopyPlane_16(const uint16_t* src, int src_stride, uint16_t* dst, int dst_stride, int width, int height, int scale)
+{
+ void (*MultiplyRow_16)(const uint16_t* src_y, uint16_t* dst_y, int scale, int width) = libyuv::MultiplyRow_16_C;
+ // Negative height means invert the image.
+ if (height < 0) {
+ height = -height;
+ dst = dst + (height - 1) * dst_stride;
+ dst_stride = -dst_stride;
+ }
+ // Coalesce rows.
+ if (src_stride == width && dst_stride == width * 2) {
+ width *= height;
+ height = 1;
+ src_stride = dst_stride = 0;
+ }
+#if defined(HAS_MERGEUVROW_16_AVX2)
+ if (libyuv::TestCpuFlag(libyuv::kCpuHasAVX2))
+ MultiplyRow_16 = libyuv::MultiplyRow_16_AVX2;
+#endif
+
+ for (int y = 0; y < height; ++y) {
+ MultiplyRow_16(src, dst, scale, width);
+ src += src_stride / sizeof(uint16_t);
+ dst += dst_stride / sizeof(uint16_t);
+ }
+}
+
+static bool CopyVideoFrameToPixelBuffer(const webrtc::I010BufferInterface* frame, CVPixelBufferRef pixel_buffer)
+{
+ RTC_DCHECK(pixel_buffer);
+ RTC_DCHECK(CVPixelBufferGetPixelFormatType(pixel_buffer) == kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange || CVPixelBufferGetPixelFormatType(pixel_buffer) == kCVPixelFormatType_420YpCbCr10BiPlanarFullRange);
+ RTC_DCHECK_EQ(CVPixelBufferGetHeightOfPlane(pixel_buffer, 0), static_cast<size_t>(frame->height()));
+ RTC_DCHECK_EQ(CVPixelBufferGetWidthOfPlane(pixel_buffer, 0), static_cast<size_t>(frame->width()));
+
+ if (CVPixelBufferLockBaseAddress(pixel_buffer, 0) != kCVReturnSuccess)
+ return false;
+
+ auto src_y = const_cast<uint16_t*>(frame->DataY());
+ auto src_u = const_cast<uint16_t*>(frame->DataU());
+ auto src_v = const_cast<uint16_t*>(frame->DataV());
+ auto src_width_y = frame->width();
+ auto src_height_y = frame->height();
+ auto src_stride_y = frame->StrideY() * sizeof(uint16_t);
+ auto src_width_uv = frame->ChromaWidth();
+ auto src_height_uv = frame->ChromaHeight();
+ auto src_stride_u = frame->StrideU() * sizeof(uint16_t);
+ auto src_stride_v = frame->StrideV() * sizeof(uint16_t);
+
+ auto* dst_y = reinterpret_cast<uint16_t*>(CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 0));
+ auto dst_stride_y = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 0);
+ auto dst_width_y = CVPixelBufferGetWidthOfPlane(pixel_buffer, 0);
+ auto dst_height_y = CVPixelBufferGetHeightOfPlane(pixel_buffer, 0);
+
+ auto* dst_uv = reinterpret_cast<uint16_t*>(CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 1));
+ auto dst_stride_uv = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1);
+ auto dst_width_uv = CVPixelBufferGetWidthOfPlane(pixel_buffer, 1);
+ auto dst_height_uv = CVPixelBufferGetHeightOfPlane(pixel_buffer, 1);
+
+ if (src_width_y != dst_width_y
+ || src_height_y != dst_height_y
+ || src_width_uv != dst_width_uv
+ || src_height_uv != dst_height_uv)
+ return false;
+
+ CopyPlane_16(src_y, src_stride_y, dst_y, dst_stride_y, dst_width_y, dst_height_y, 64);
+ MergeUVPlane_16(src_u, src_stride_u, src_v, src_stride_v, dst_uv, dst_stride_uv, dst_width_uv, dst_height_uv, 64);
+
+ CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
+ return true;
+}
+
CVPixelBufferRef pixelBufferFromFrame(const VideoFrame& frame, const std::function<CVPixelBufferRef(size_t, size_t)>& makePixelBuffer)
{
if (frame.video_frame_buffer()->type() != VideoFrameBuffer::Type::kNative) {
- rtc::scoped_refptr<const I420BufferInterface> buffer = frame.video_frame_buffer()->GetI420();
+ auto pixelBuffer = makePixelBuffer(frame.video_frame_buffer()->width(), frame.video_frame_buffer()->height());
+ if (!pixelBuffer)
+ return nullptr;
- auto pixelBuffer = makePixelBuffer(buffer->width(), buffer->height());
- if (pixelBuffer)
+ if (frame.video_frame_buffer()->type() == VideoFrameBuffer::Type::kI420)
CopyVideoFrameToPixelBuffer(frame.video_frame_buffer()->GetI420(), pixelBuffer);
+ else if (frame.video_frame_buffer()->type() == VideoFrameBuffer::Type::kI010)
+ CopyVideoFrameToPixelBuffer(frame.video_frame_buffer()->GetI010(), pixelBuffer);
return pixelBuffer;
}
Modified: trunk/Source/ThirdParty/libwebrtc/Source/webrtc/sdk/WebKit/WebKitVP9Decoder.cpp (265072 => 265073)
--- trunk/Source/ThirdParty/libwebrtc/Source/webrtc/sdk/WebKit/WebKitVP9Decoder.cpp 2020-07-30 01:00:22 UTC (rev 265072)
+++ trunk/Source/ThirdParty/libwebrtc/Source/webrtc/sdk/WebKit/WebKitVP9Decoder.cpp 2020-07-30 01:01:04 UTC (rev 265073)
@@ -58,6 +58,8 @@
void setCurrentFrame(VTVideoDecoderFrame currentFrame) { m_currentFrame = currentFrame; }
OSStatus decoderFailed(int error);
+ void createPixelBufferPoolForFormatDescription(CMFormatDescriptionRef);
+
private:
int32_t Decoded(VideoFrame&) final;
int32_t Decoded(VideoFrame&, int64_t decode_time_ms) final;
@@ -169,6 +171,7 @@
decoder->m_instance = std::make_unique<VP9DecoderImpl>();
decoder->m_receiver = std::make_unique<WebKitVP9DecoderReceiver>(session);
+ decoder->m_receiver->createPixelBufferPoolForFormatDescription(formatDescription);
decoder->m_instance->RegisterDecodeCompleteCallback(decoder->m_receiver.get());
@@ -253,6 +256,93 @@
CFRelease(m_pixelBufferPool);
}
+void WebKitVP9DecoderReceiver::createPixelBufferPoolForFormatDescription(CMFormatDescriptionRef formatDescription)
+{
+ // CoreAnimation doesn't support full-planar YUV, so we must convert the buffers output
+ // by libvpx to bi-planar YUV. Create pixel buffer attributes and give those to the
+ // decoder session for use in creating its own internal CVPixelBufferPool, which we
+ // will use post-decode.
+ bool isFullRange = false;
+ bool is10Bit = false;
+
+ do {
+ auto extensions = CMFormatDescriptionGetExtensions(formatDescription);
+ if (!extensions)
+ break;
+
+ CFTypeRef extensionAtoms = CFDictionaryGetValue(extensions, kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms);
+ if (!extensionAtoms || CFGetTypeID(extensionAtoms) != CFDictionaryGetTypeID())
+ break;
+
+ auto configurationRecord = static_cast<CFDataRef>(CFDictionaryGetValue((CFDictionaryRef)extensionAtoms, CFSTR("vpcC")));
+ if (!configurationRecord || CFGetTypeID(configurationRecord) != CFDataGetTypeID())
+ break;
+
+ auto configurationRecordSize = CFDataGetLength(configurationRecord);
+ if (configurationRecordSize < 12)
+ break;
+
+ auto configurationRecordData = CFDataGetBytePtr(configurationRecord);
+ auto bitDepthChromaAndRange = *(configurationRecordData + 6);
+
+ if ((bitDepthChromaAndRange >> 4) == 10)
+ is10Bit = true;
+
+ if (bitDepthChromaAndRange & 0x1)
+ isFullRange = true;
+ } while (false);
+
+ OSType pixelFormat;
+ if (is10Bit)
+ pixelFormat = isFullRange ? kCVPixelFormatType_420YpCbCr10BiPlanarFullRange : kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange;
+ else
+ pixelFormat = isFullRange ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
+
+ auto createPixelFormatAttributes = [] (OSType pixelFormat, int32_t borderPixels) {
+ auto createNumber = [] (int32_t format) -> CFNumberRef {
+ return CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &format);
+ };
+ auto cfPixelFormats = CFArrayCreateMutable(kCFAllocatorDefault, 2, &kCFTypeArrayCallBacks);
+ auto formatNumber = createNumber(pixelFormat);
+ CFArrayAppendValue(cfPixelFormats, formatNumber);
+ CFRelease(formatNumber);
+
+ auto borderPixelsValue = createNumber(borderPixels); // use the caller-supplied border width, not a hard-coded 32
+
+ const void* keys[] = {
+ kCVPixelBufferPixelFormatTypeKey,
+ kCVPixelBufferExtendedPixelsLeftKey,
+ kCVPixelBufferExtendedPixelsRightKey,
+ kCVPixelBufferExtendedPixelsTopKey,
+ kCVPixelBufferExtendedPixelsBottomKey,
+ };
+ const void* values[] = {
+ cfPixelFormats,
+ borderPixelsValue,
+ borderPixelsValue,
+ borderPixelsValue,
+ borderPixelsValue,
+ };
+ auto attributes = CFDictionaryCreate(kCFAllocatorDefault, keys, values, std::size(keys), &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
+ CFRelease(borderPixelsValue);
+ CFRelease(cfPixelFormats);
+ return attributes;
+ };
+
+ auto pixelBufferAttributes = createPixelFormatAttributes(pixelFormat, 32);
+ VTDecoderSessionSetPixelBufferAttributes(m_session, pixelBufferAttributes);
+ CFRelease(pixelBufferAttributes);
+
+ if (m_pixelBufferPool) {
+ CFRelease(m_pixelBufferPool);
+ m_pixelBufferPool = nullptr;
+ }
+
+ m_pixelBufferPool = VTDecoderSessionGetPixelBufferPool(m_session);
+ if (m_pixelBufferPool)
+ CFRetain(m_pixelBufferPool);
+}
+
OSStatus WebKitVP9DecoderReceiver::decoderFailed(int error)
{
OSStatus vtError = kVTVideoDecoderBadDataErr;
@@ -271,33 +361,17 @@
int32_t WebKitVP9DecoderReceiver::Decoded(VideoFrame& frame)
{
- CVPixelBufferRef newPixelBuffer { nullptr };
- auto pixelBuffer = pixelBufferFromFrame(frame, [this, &newPixelBuffer](size_t width, size_t height) -> CVPixelBufferRef {
- if (!m_pixelBufferPool || m_pixelBufferWidth != width || m_pixelBufferHeight != height) {
- if (m_pixelBufferPool)
- CFRelease(m_pixelBufferPool);
- m_pixelBufferPool = createPixelBufferPool(width, height);
- if (!m_pixelBufferPool) {
- RTC_LOG(LS_ERROR) << "VP9 decoder: unable to create pixel buffer pool";
- return nullptr;
- }
- m_pixelBufferWidth = width;
- m_pixelBufferHeight = height;
- }
-
- auto status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, m_pixelBufferPool, &newPixelBuffer);
- if (status != kCVReturnSuccess) {
- RTC_LOG(LS_ERROR) << "VP9 decoder: unable to create pixel buffer from pool";
- return nullptr;
- }
-
- return newPixelBuffer;
+ auto pixelBuffer = pixelBufferFromFrame(frame, [this](size_t width, size_t height) -> CVPixelBufferRef {
+ CVPixelBufferRef pixelBuffer = nullptr;
+ if (CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, m_pixelBufferPool, &pixelBuffer) == kCVReturnSuccess)
+ return pixelBuffer;
+ return nullptr;
});
VTDecoderSessionEmitDecodedFrame(m_session, m_currentFrame, pixelBuffer ? noErr : -1, 0, pixelBuffer);
m_currentFrame = nullptr;
- if (newPixelBuffer)
- CFRelease(newPixelBuffer);
+ if (pixelBuffer)
+ CFRelease(pixelBuffer);
return 0;
}
Modified: trunk/Source/WebCore/ChangeLog (265072 => 265073)
--- trunk/Source/WebCore/ChangeLog 2020-07-30 01:00:22 UTC (rev 265072)
+++ trunk/Source/WebCore/ChangeLog 2020-07-30 01:01:04 UTC (rev 265073)
@@ -1,3 +1,24 @@
+2020-07-29 Jer Noble <jer.no...@apple.com>
+
+ Support HDR decode in SW VP9
+ https://bugs.webkit.org/show_bug.cgi?id=214928
+
+ Reviewed by Eric Carlson.
+
+ Convert the incoming properties parsed from the VP9 header into extensions to our
+ CMFormatDescription attached to each incoming video fram.
+
+ Drive-by fix: Files in the wild will have incorrect values for whether a given
+ frame is a keyframe or not. Trust the VP9 header parser rather than the container
+ in these situations.
+
+ * platform/graphics/cocoa/SourceBufferParserWebM.cpp:
+ (WebCore::convertToCMColorPrimaries):
+ (WebCore::convertToCMTransferFunction):
+ (WebCore::convertToCMYCbCRMatrix):
+ (WebCore::createFormatDescriptionFromVP9HeaderParser):
+ (WebCore::SourceBufferParserWebM::OnFrame):
+
2020-07-29 Wenson Hsieh <wenson_hs...@apple.com>
[iPadOS] Custom dropdown menu dismisses immediately on account.nhl.com
Modified: trunk/Source/WebCore/PAL/ChangeLog (265072 => 265073)
--- trunk/Source/WebCore/PAL/ChangeLog 2020-07-30 01:00:22 UTC (rev 265072)
+++ trunk/Source/WebCore/PAL/ChangeLog 2020-07-30 01:01:04 UTC (rev 265073)
@@ -1,3 +1,14 @@
+2020-07-29 Jer Noble <jer.no...@apple.com>
+
+ Support HDR decode in SW VP9
+ https://bugs.webkit.org/show_bug.cgi?id=214928
+ <rdar://problem/66284848>
+
+ Reviewed by Eric Carlson.
+
+ * pal/cf/CoreMediaSoftLink.cpp:
+ * pal/cf/CoreMediaSoftLink.h:
+
2020-07-28 Jonathan Bedard <jbed...@apple.com>
[Big Sur] Wrap SPI in feature guards (Follow-up fix)
Modified: trunk/Source/WebCore/PAL/pal/cf/CoreMediaSoftLink.cpp (265072 => 265073)
--- trunk/Source/WebCore/PAL/pal/cf/CoreMediaSoftLink.cpp 2020-07-30 01:00:22 UTC (rev 265072)
+++ trunk/Source/WebCore/PAL/pal/cf/CoreMediaSoftLink.cpp 2020-07-30 01:01:04 UTC (rev 265073)
@@ -64,6 +64,16 @@
SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMTimeRangeEqual, Boolean, (CMTimeRange range1, CMTimeRange range2), (range1, range2), PAL_EXPORT)
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms, CFStringRef, PAL_EXPORT)
+SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMFormatDescriptionColorPrimaries_DCI_P3, CFStringRef, PAL_EXPORT)
+SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMFormatDescriptionColorPrimaries_ITU_R_2020, CFStringRef, PAL_EXPORT)
+SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMFormatDescriptionColorPrimaries_P3_D65, CFStringRef, PAL_EXPORT)
+SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMFormatDescriptionExtension_FullRangeVideo, CFStringRef, PAL_EXPORT)
+SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMFormatDescriptionTransferFunction_ITU_R_2020, CFStringRef, PAL_EXPORT)
+SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMFormatDescriptionTransferFunction_ITU_R_2100_HLG, CFStringRef, PAL_EXPORT)
+SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMFormatDescriptionTransferFunction_Linear, CFStringRef, PAL_EXPORT)
+SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMFormatDescriptionTransferFunction_SMPTE_ST_2084_PQ, CFStringRef, PAL_EXPORT)
+SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMFormatDescriptionTransferFunction_SMPTE_ST_428_1, CFStringRef, PAL_EXPORT)
+SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMFormatDescriptionYCbCrMatrix_ITU_R_2020, CFStringRef, PAL_EXPORT)
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMTextMarkupAlignmentType_End, CFStringRef, PAL_EXPORT)
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMTextMarkupAlignmentType_Middle, CFStringRef, PAL_EXPORT)
SOFT_LINK_CONSTANT_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMTextMarkupAlignmentType_Start, CFStringRef, PAL_EXPORT)
@@ -176,7 +186,7 @@
SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMSampleBufferCallForEachSample, OSStatus, (CMSampleBufferRef sbuf, OSStatus (* CMSAMPLEBUFFERCALL_NOESCAPE callback)( CMSampleBufferRef sampleBuffer, CMItemCount index, void *refcon), void *refcon), (sbuf, callback, refcon), PAL_EXPORT)
SOFT_LINK_FUNCTION_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, CMSampleBufferCallBlockForEachSample, OSStatus, (CMSampleBufferRef sbuf, OSStatus (^ CMSAMPLEBUFFERCALL_NOESCAPE handler)(CMSampleBufferRef, CMItemCount)), (sbuf, handler), PAL_EXPORT)
SOFT_LINK_CONSTANT_MAY_FAIL_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMFormatDescriptionExtension_ProtectedContentOriginalFormat, CFStringRef, PAL_EXPORT)
-
+SOFT_LINK_CONSTANT_MAY_FAIL_FOR_SOURCE_WITH_EXPORT(PAL, CoreMedia, kCMFormatDescriptionTransferFunction_sRGB, CFStringRef, PAL_EXPORT)
#endif // PLATFORM(COCOA)
#if PLATFORM(IOS_FAMILY)
Modified: trunk/Source/WebCore/PAL/pal/cf/CoreMediaSoftLink.h (265072 => 265073)
--- trunk/Source/WebCore/PAL/pal/cf/CoreMediaSoftLink.h 2020-07-30 01:00:22 UTC (rev 265072)
+++ trunk/Source/WebCore/PAL/pal/cf/CoreMediaSoftLink.h 2020-07-30 01:01:04 UTC (rev 265073)
@@ -86,8 +86,30 @@
SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms, CFStringRef)
#define kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms get_CoreMedia_kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms()
+SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMFormatDescriptionColorPrimaries_DCI_P3, CFStringRef)
+#define kCMFormatDescriptionColorPrimaries_DCI_P3 get_CoreMedia_kCMFormatDescriptionColorPrimaries_DCI_P3()
+SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMFormatDescriptionColorPrimaries_ITU_R_2020, CFStringRef)
+#define kCMFormatDescriptionColorPrimaries_ITU_R_2020 get_CoreMedia_kCMFormatDescriptionColorPrimaries_ITU_R_2020()
+SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMFormatDescriptionColorPrimaries_P3_D65, CFStringRef)
+#define kCMFormatDescriptionColorPrimaries_P3_D65 get_CoreMedia_kCMFormatDescriptionColorPrimaries_P3_D65()
+SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMFormatDescriptionExtension_FullRangeVideo, CFStringRef)
+#define kCMFormatDescriptionExtension_FullRangeVideo get_CoreMedia_kCMFormatDescriptionExtension_FullRangeVideo()
+SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMFormatDescriptionTransferFunction_ITU_R_2020, CFStringRef)
+#define kCMFormatDescriptionTransferFunction_ITU_R_2020 get_CoreMedia_kCMFormatDescriptionTransferFunction_ITU_R_2020()
+SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMFormatDescriptionTransferFunction_ITU_R_2100_HLG, CFStringRef)
+#define kCMFormatDescriptionTransferFunction_ITU_R_2100_HLG get_CoreMedia_kCMFormatDescriptionTransferFunction_ITU_R_2100_HLG()
+SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMFormatDescriptionTransferFunction_Linear, CFStringRef)
+#define kCMFormatDescriptionTransferFunction_Linear get_CoreMedia_kCMFormatDescriptionTransferFunction_Linear()
+SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMFormatDescriptionTransferFunction_SMPTE_ST_2084_PQ, CFStringRef)
+#define kCMFormatDescriptionTransferFunction_SMPTE_ST_2084_PQ get_CoreMedia_kCMFormatDescriptionTransferFunction_SMPTE_ST_2084_PQ()
+SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMFormatDescriptionTransferFunction_SMPTE_ST_428_1, CFStringRef)
+#define kCMFormatDescriptionTransferFunction_SMPTE_ST_428_1 get_CoreMedia_kCMFormatDescriptionTransferFunction_SMPTE_ST_428_1()
+SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMFormatDescriptionYCbCrMatrix_ITU_R_2020, CFStringRef)
+#define kCMFormatDescriptionYCbCrMatrix_ITU_R_2020 get_CoreMedia_kCMFormatDescriptionYCbCrMatrix_ITU_R_2020()
SOFT_LINK_CONSTANT_MAY_FAIL_FOR_HEADER(PAL, CoreMedia, kCMFormatDescriptionExtension_ProtectedContentOriginalFormat, CFStringRef)
#define kCMFormatDescriptionExtension_ProtectedContentOriginalFormat get_CoreMedia_kCMFormatDescriptionExtension_ProtectedContentOriginalFormat()
+SOFT_LINK_CONSTANT_MAY_FAIL_FOR_HEADER(PAL, CoreMedia, kCMFormatDescriptionTransferFunction_sRGB, CFStringRef)
+#define kCMFormatDescriptionTransferFunction_sRGB get_CoreMedia_kCMFormatDescriptionTransferFunction_sRGB()
SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMTextMarkupAlignmentType_End, CFStringRef)
#define kCMTextMarkupAlignmentType_End get_CoreMedia_kCMTextMarkupAlignmentType_End()
SOFT_LINK_CONSTANT_FOR_HEADER(PAL, CoreMedia, kCMTextMarkupAlignmentType_Middle, CFStringRef)
Modified: trunk/Source/WebCore/platform/cocoa/CoreVideoSoftLink.cpp (265072 => 265073)
--- trunk/Source/WebCore/platform/cocoa/CoreVideoSoftLink.cpp 2020-07-30 01:00:22 UTC (rev 265072)
+++ trunk/Source/WebCore/platform/cocoa/CoreVideoSoftLink.cpp 2020-07-30 01:01:04 UTC (rev 265073)
@@ -60,6 +60,13 @@
SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreVideo, kCVImageBufferYCbCrMatrix_ITU_R_709_2, CFStringRef)
SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreVideo, kCVImageBufferYCbCrMatrix_ITU_R_601_4, CFStringRef)
SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreVideo, kCVImageBufferYCbCrMatrix_SMPTE_240M_1995, CFStringRef)
+SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreVideo, kCVImageBufferColorPrimaries_EBU_3213, CFStringRef)
+SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreVideo, kCVImageBufferColorPrimaries_ITU_R_709_2, CFStringRef)
+SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreVideo, kCVImageBufferColorPrimaries_SMPTE_C, CFStringRef)
+SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreVideo, kCVImageBufferColorPrimariesKey, CFStringRef)
+SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreVideo, kCVImageBufferTransferFunctionKey, CFStringRef)
+SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreVideo, kCVImageBufferTransferFunction_ITU_R_709_2, CFStringRef)
+SOFT_LINK_CONSTANT_FOR_SOURCE(WebCore, CoreVideo, kCVImageBufferTransferFunction_SMPTE_240M_1995, CFStringRef)
SOFT_LINK_CONSTANT_MAY_FAIL_FOR_SOURCE(WebCore, CoreVideo, kCVImageBufferYCbCrMatrix_DCI_P3, CFStringRef)
SOFT_LINK_CONSTANT_MAY_FAIL_FOR_SOURCE(WebCore, CoreVideo, kCVImageBufferYCbCrMatrix_P3_D65, CFStringRef)
SOFT_LINK_CONSTANT_MAY_FAIL_FOR_SOURCE(WebCore, CoreVideo, kCVImageBufferYCbCrMatrix_ITU_R_2020, CFStringRef)
Modified: trunk/Source/WebCore/platform/cocoa/CoreVideoSoftLink.h (265072 => 265073)
--- trunk/Source/WebCore/platform/cocoa/CoreVideoSoftLink.h 2020-07-30 01:00:22 UTC (rev 265072)
+++ trunk/Source/WebCore/platform/cocoa/CoreVideoSoftLink.h 2020-07-30 01:01:04 UTC (rev 265073)
@@ -95,6 +95,20 @@
#define kCVPixelBufferCGBitmapContextCompatibilityKey get_CoreVideo_kCVPixelBufferCGBitmapContextCompatibilityKey()
SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreVideo, kCVPixelBufferCGImageCompatibilityKey, CFStringRef)
#define kCVPixelBufferCGImageCompatibilityKey get_CoreVideo_kCVPixelBufferCGImageCompatibilityKey()
+SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreVideo, kCVImageBufferColorPrimaries_EBU_3213, CFStringRef)
+#define kCVImageBufferColorPrimaries_EBU_3213 get_CoreVideo_kCVImageBufferColorPrimaries_EBU_3213()
+SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreVideo, kCVImageBufferColorPrimaries_ITU_R_709_2, CFStringRef)
+#define kCVImageBufferColorPrimaries_ITU_R_709_2 get_CoreVideo_kCVImageBufferColorPrimaries_ITU_R_709_2()
+SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreVideo, kCVImageBufferColorPrimaries_SMPTE_C, CFStringRef)
+#define kCVImageBufferColorPrimaries_SMPTE_C get_CoreVideo_kCVImageBufferColorPrimaries_SMPTE_C()
+SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreVideo, kCVImageBufferColorPrimariesKey, CFStringRef)
+#define kCVImageBufferColorPrimariesKey get_CoreVideo_kCVImageBufferColorPrimariesKey()
+SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreVideo, kCVImageBufferTransferFunctionKey, CFStringRef)
+#define kCVImageBufferTransferFunctionKey get_CoreVideo_kCVImageBufferTransferFunctionKey()
+SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreVideo, kCVImageBufferTransferFunction_ITU_R_709_2, CFStringRef)
+#define kCVImageBufferTransferFunction_ITU_R_709_2 get_CoreVideo_kCVImageBufferTransferFunction_ITU_R_709_2()
+SOFT_LINK_CONSTANT_FOR_HEADER(WebCore, CoreVideo, kCVImageBufferTransferFunction_SMPTE_240M_1995, CFStringRef)
+#define kCVImageBufferTransferFunction_SMPTE_240M_1995 get_CoreVideo_kCVImageBufferTransferFunction_SMPTE_240M_1995()
#if USE(OPENGL_ES)
SOFT_LINK_FUNCTION_FOR_HEADER(WebCore, CoreVideo, CVOpenGLESTextureCacheCreate, CVReturn, (CFAllocatorRef allocator, CFDictionaryRef cacheAttributes, CVEAGLContext eaglContext, CFDictionaryRef textureAttributes, CVOpenGLESTextureCacheRef* cacheOut), (allocator, cacheAttributes, eaglContext, textureAttributes, cacheOut))
Modified: trunk/Source/WebCore/platform/graphics/cocoa/SourceBufferParserWebM.cpp (265072 => 265073)
--- trunk/Source/WebCore/platform/graphics/cocoa/SourceBufferParserWebM.cpp 2020-07-30 01:00:22 UTC (rev 265072)
+++ trunk/Source/WebCore/platform/graphics/cocoa/SourceBufferParserWebM.cpp 2020-07-30 01:01:04 UTC (rev 265073)
@@ -29,6 +29,7 @@
#if ENABLE(MEDIA_SOURCE)
#include "AudioTrackPrivateWebM.h"
+#include "CoreVideoSoftLink.h"
#include "InbandTextTrackPrivate.h"
#include "MediaDescription.h"
#include "MediaSampleAVFObjC.h"
@@ -38,6 +39,7 @@
#include "VP9UtilitiesCocoa.h"
#include "VideoTrackPrivateWebM.h"
#include <_javascript_Core/DataView.h>
+#include <pal/cf/CoreMediaSoftLink.h>
#include <webm/webm_parser.h>
#include <wtf/Algorithms.h>
#include <wtf/Deque.h>
@@ -49,6 +51,8 @@
namespace WebCore {
+using namespace PAL;
+
static bool isWebmParserAvailable()
{
return !!webm::swap && RuntimeEnabledFeatures::sharedFeatures().webMParserEnabled();
@@ -533,6 +537,27 @@
}
}
+static CFStringRef convertToCMColorPrimaries(uint8_t primaries)
+{
+ switch (primaries) {
+ case VPConfigurationColorPrimaries::BT_709_6:
+ return kCVImageBufferColorPrimaries_ITU_R_709_2;
+ case VPConfigurationColorPrimaries::EBU_Tech_3213_E:
+ return kCVImageBufferColorPrimaries_EBU_3213;
+ case VPConfigurationColorPrimaries::BT_601_7:
+ case VPConfigurationColorPrimaries::SMPTE_ST_240:
+ return kCVImageBufferColorPrimaries_SMPTE_C;
+ case VPConfigurationColorPrimaries::SMPTE_RP_431_2:
+ return kCMFormatDescriptionColorPrimaries_DCI_P3;
+ case VPConfigurationColorPrimaries::SMPTE_EG_432_1:
+ return kCMFormatDescriptionColorPrimaries_P3_D65;
+ case VPConfigurationColorPrimaries::BT_2020_Nonconstant_Luminance:
+ return kCMFormatDescriptionColorPrimaries_ITU_R_2020;
+ }
+
+ return nullptr;
+}
+
static uint8_t convertToTransferCharacteristics(const TransferCharacteristics& characteristics)
{
switch (characteristics) {
@@ -573,6 +598,31 @@
}
}
+static CFStringRef convertToCMTransferFunction(uint8_t characteristics)
+{
+ switch (characteristics) {
+ case VPConfigurationTransferCharacteristics::BT_709_6:
+ return kCVImageBufferTransferFunction_ITU_R_709_2;
+ case VPConfigurationTransferCharacteristics::SMPTE_ST_240:
+ return kCVImageBufferTransferFunction_SMPTE_240M_1995;
+ case VPConfigurationTransferCharacteristics::SMPTE_ST_2084:
+ return kCMFormatDescriptionTransferFunction_SMPTE_ST_2084_PQ;
+ case VPConfigurationTransferCharacteristics::BT_2020_10bit:
+ case VPConfigurationTransferCharacteristics::BT_2020_12bit:
+ return kCMFormatDescriptionTransferFunction_ITU_R_2020;
+ case VPConfigurationTransferCharacteristics::SMPTE_ST_428_1:
+ return kCMFormatDescriptionTransferFunction_SMPTE_ST_428_1;
+ case VPConfigurationTransferCharacteristics::BT_2100_HLG:
+ return kCMFormatDescriptionTransferFunction_ITU_R_2100_HLG;
+ case VPConfigurationTransferCharacteristics::IEC_61966_2_1:
+ return PAL::canLoad_CoreMedia_kCMFormatDescriptionTransferFunction_sRGB() ? get_CoreMedia_kCMFormatDescriptionTransferFunction_sRGB() : nullptr;
+ case VPConfigurationTransferCharacteristics::Linear:
+ return kCMFormatDescriptionTransferFunction_Linear;
+ }
+
+ return nullptr;
+}
+
static uint8_t convertToMatrixCoefficients(const MatrixCoefficients& coefficients)
{
switch (coefficients) {
@@ -599,6 +649,23 @@
}
}
+static CFStringRef convertToCMYCbCRMatrix(uint8_t coefficients)
+{
+ switch (coefficients) {
+ case VPConfigurationMatrixCoefficients::BT_2020_Nonconstant_Luminance:
+ return kCMFormatDescriptionYCbCrMatrix_ITU_R_2020;
+ case VPConfigurationMatrixCoefficients::BT_470_7_BG:
+ case VPConfigurationMatrixCoefficients::BT_601_7:
+ return kCVImageBufferYCbCrMatrix_ITU_R_601_4;
+ case VPConfigurationMatrixCoefficients::BT_709_6:
+ return kCVImageBufferYCbCrMatrix_ITU_R_709_2;
+ case VPConfigurationMatrixCoefficients::SMPTE_ST_240:
+ return kCVImageBufferYCbCrMatrix_SMPTE_240M_1995;
+ }
+
+ return nullptr;
+}
+
static uint8_t convertSubsamplingXYToChromaSubsampling(uint64_t x, uint64_t y)
{
if (x & y)
@@ -691,12 +758,33 @@
CFTypeRef configurationValues[] = { data.get() };
auto configurationDict = adoptCF(CFDictionaryCreate(kCFAllocatorDefault, configurationKeys, configurationValues, WTF_ARRAY_LENGTH(configurationKeys), &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
- Vector<CFTypeRef> atomsKeys { kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms };
- Vector<CFTypeRef> atomsValues = { configurationDict.get() };
- auto atoms = adoptCF(CFDictionaryCreate(kCFAllocatorDefault, atomsKeys.data(), atomsValues.data(), atomsKeys.size(), &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
+ Vector<CFTypeRef> extensionsKeys { kCMFormatDescriptionExtension_SampleDescriptionExtensionAtoms };
+ Vector<CFTypeRef> extensionsValues = { configurationDict.get() };
+ if (record.videoFullRangeFlag == VPConfigurationRange::FullRange) {
+ extensionsKeys.append(kCMFormatDescriptionExtension_FullRangeVideo);
+ extensionsValues.append(kCFBooleanTrue);
+ }
+
+ if (auto cmColorPrimaries = convertToCMColorPrimaries(record.colorPrimaries)) {
+ extensionsKeys.append(kCVImageBufferColorPrimariesKey);
+ extensionsValues.append(cmColorPrimaries);
+ }
+
+ if (auto cmTransferFunction = convertToCMTransferFunction(record.transferCharacteristics)) {
+ extensionsKeys.append(kCVImageBufferTransferFunctionKey);
+ extensionsValues.append(cmTransferFunction);
+ }
+
+ if (auto cmMatrix = convertToCMYCbCRMatrix(record.matrixCoefficients)) {
+ extensionsKeys.append(kCVImageBufferYCbCrMatrixKey);
+ extensionsValues.append(cmMatrix);
+ }
+
+ auto extensions = adoptCF(CFDictionaryCreate(kCFAllocatorDefault, extensionsKeys.data(), extensionsValues.data(), extensionsKeys.size(), &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
+
CMVideoFormatDescriptionRef formatDescription = nullptr;
- if (noErr != CMVideoFormatDescriptionCreate(kCFAllocatorDefault, kCMVideoCodecType_VP9, parser.width(), parser.height(), atoms.get(), &formatDescription))
+ if (noErr != CMVideoFormatDescriptionCreate(kCFAllocatorDefault, kCMVideoCodecType_VP9, parser.width(), parser.height(), extensions.get(), &formatDescription))
return nullptr;
return adoptCF(formatDescription);
}
@@ -721,12 +809,6 @@
if (!block)
return Status(Status::kInvalidElementId);
- auto isSync = WTF::switchOn(*m_currentBlock, [](Block&) {
- return false;
- }, [](SimpleBlock& block) {
- return block.is_key_frame;
- });
-
auto trackNumber = block->track_number;
auto* trackData = trackDataForTrackNumber(trackNumber);
@@ -798,7 +880,7 @@
trackData->currentBlockBuffer = nullptr;
trackData->currentBlockBufferPosition = 0;
- if (!isSync) {
+ if (!headerParser.key()) {
auto attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer.get(), true);
ASSERT(attachmentsArray);
if (!attachmentsArray)