Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h (160809 => 160810)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h 2013-12-19 00:52:47 UTC (rev 160809)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.h 2013-12-19 01:14:26 UTC (rev 160810)
@@ -31,8 +31,10 @@
#include "MediaPlayerPrivate.h"
#include "SourceBufferPrivateClient.h"
#include <wtf/MediaTime.h>
+#include <wtf/Vector.h>
OBJC_CLASS AVAsset;
+OBJC_CLASS AVSampleBufferAudioRenderer;
OBJC_CLASS AVSampleBufferDisplayLayer;
typedef struct OpaqueCMTimebase* CMTimebaseRef;
@@ -52,6 +54,9 @@
void addDisplayLayer(AVSampleBufferDisplayLayer*);
void removeDisplayLayer(AVSampleBufferDisplayLayer*);
+ void addAudioRenderer(AVSampleBufferAudioRenderer*);
+ void removeAudioRenderer(AVSampleBufferAudioRenderer*);
+
virtual MediaPlayer::NetworkState networkState() const OVERRIDE;
virtual MediaPlayer::ReadyState readyState() const OVERRIDE;
void setReadyState(MediaPlayer::ReadyState);
@@ -82,6 +87,10 @@
virtual bool paused() const OVERRIDE;
+ virtual void setVolume(float volume) OVERRIDE;
+ virtual bool supportsMuting() const OVERRIDE { return true; }
+ virtual void setMuted(bool) OVERRIDE;
+
virtual bool supportsScanning() const OVERRIDE;
virtual IntSize naturalSize() const OVERRIDE;
@@ -151,6 +160,7 @@
RefPtr<MediaSourcePrivateAVFObjC> m_mediaSourcePrivate;
RetainPtr<AVAsset> m_asset;
RetainPtr<AVSampleBufferDisplayLayer> m_sampleBufferDisplayLayer;
+ Vector<RetainPtr<AVSampleBufferAudioRenderer>> m_sampleBufferAudioRenderers;
std::unique_ptr<PlatformClockCM> m_clock;
MediaPlayer::NetworkState m_networkState;
MediaPlayer::ReadyState m_readyState;
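A note for readers outside WebKit (not part of the commit): the OBJC_CLASS forward declarations above keep this header includable from plain C++ translation units, while the RetainPtr elements give the new Vector ownership of each attached renderer. The macro expands approximately as follows:

    // Approximate expansion from WTF (wtf/Compiler.h), shown for context only.
    #ifdef __OBJC__
    #define OBJC_CLASS @class  // real Objective-C forward declaration in ObjC++ files
    #else
    #define OBJC_CLASS class   // plain C++ sees an opaque class type instead
    #endif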
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm (160809 => 160810)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm 2013-12-19 00:52:47 UTC (rev 160809)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm 2013-12-19 01:14:26 UTC (rev 160810)
@@ -48,10 +48,13 @@
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVAsset)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVURLAsset)
+SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferAudioRenderer)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamDataParser)
SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVVideoPerformanceMetrics)
+SOFT_LINK(CoreMedia, FigReadOnlyTimebaseSetTargetTimebase, OSStatus, (CMTimebaseRef timebase, CMTimebaseRef newTargetTimebase), (timebase, newTargetTimebase))
+
#pragma mark -
#pragma mark AVVideoPerformanceMetrics
@@ -66,6 +69,18 @@
- (AVVideoPerformanceMetrics *)videoPerformanceMetrics;
@end
+
+#pragma mark -
+#pragma mark AVSampleBufferAudioRenderer
+
+#if __MAC_OS_X_VERSION_MIN_REQUIRED <= 1090
+@interface AVSampleBufferAudioRenderer : NSObject
+- (CMTimebaseRef)timebase;
+- (void)setVolume:(float)volume;
+- (void)setMuted:(BOOL)muted;
+@end
+#endif
+
namespace WebCore {
#pragma mark -
@@ -101,7 +116,7 @@
bool MediaPlayerPrivateMediaSourceAVFObjC::isAvailable()
{
- return AVFoundationLibrary() && CoreMediaLibrary() && getAVStreamDataParserClass();
+ return AVFoundationLibrary() && CoreMediaLibrary() && getAVStreamDataParserClass() && getAVSampleBufferAudioRendererClass();
}
static HashSet<String> mimeTypeCache()
@@ -211,11 +226,23 @@
return !m_clock->isRunning();
}
+void MediaPlayerPrivateMediaSourceAVFObjC::setVolume(float volume)
+{
+ for (auto it = m_sampleBufferAudioRenderers.begin(), end = m_sampleBufferAudioRenderers.end(); it != end; ++it)
+ [*it setVolume:volume];
+}
+
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsScanning() const
{
return true;
}
+void MediaPlayerPrivateMediaSourceAVFObjC::setMuted(bool muted)
+{
+ for (auto it = m_sampleBufferAudioRenderers.begin(), end = m_sampleBufferAudioRenderers.end(); it != end; ++it)
+ [*it setMuted:muted];
+}
+
IntSize MediaPlayerPrivateMediaSourceAVFObjC::naturalSize() const
{
// FIXME(125156): Report the intrinsic size of the enabled video track.
@@ -468,6 +495,26 @@
m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
}
+void MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
+{
+ if (m_sampleBufferAudioRenderers.contains(audioRenderer))
+ return;
+
+ m_sampleBufferAudioRenderers.append(audioRenderer);
+ FigReadOnlyTimebaseSetTargetTimebase([audioRenderer timebase], m_clock->timebase());
+ m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
+}
+
+void MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
+{
+ size_t pos = m_sampleBufferAudioRenderers.find(audioRenderer);
+ if (pos == notFound)
+ return;
+
+ m_sampleBufferAudioRenderers.remove(pos);
+ m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
+}
+
}
#endif
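The key move in addAudioRenderer() above is slaving the renderer's read-only timebase to the player's master timebase, so a single rate or time change on the master reaches every attached renderer. A standalone sketch of that behavior, assuming only the calls soft-linked in this file (CMTimebaseSetRate is public CoreMedia; the slaving call is SPI):

    CMTimebaseRef master = m_clock->timebase();                             // player's master clock
    FigReadOnlyTimebaseSetTargetTimebase([audioRenderer timebase], master); // slave renderer to master
    CMTimebaseSetRate(master, 1.0); // "play": the slaved renderer timebase advances too
    CMTimebaseSetRate(master, 0.0); // "pause": the renderer halts with the master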
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h (160809 => 160810)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h 2013-12-19 00:52:47 UTC (rev 160809)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.h 2013-12-19 01:14:26 UTC (rev 160810)
@@ -29,6 +29,7 @@
#if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
#include "SourceBufferPrivate.h"
+#include <map>
#include <wtf/Deque.h>
#include <wtf/HashMap.h>
#include <wtf/MediaTime.h>
@@ -39,6 +40,7 @@
OBJC_CLASS AVAsset;
OBJC_CLASS AVStreamDataParser;
+OBJC_CLASS AVSampleBufferAudioRenderer;
OBJC_CLASS AVSampleBufferDisplayLayer;
OBJC_CLASS NSError;
OBJC_CLASS NSObject;
@@ -94,13 +96,21 @@
virtual void enqueueSample(PassRefPtr<MediaSample>, AtomicString trackID) OVERRIDE;
virtual bool isReadyForMoreSamples(AtomicString trackID) OVERRIDE;
virtual void setActive(bool) OVERRIDE;
+ virtual void notifyClientWhenReadyForMoreSamples(AtomicString trackID) OVERRIDE;
+ void flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>>, AVSampleBufferAudioRenderer*);
+ void flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>>, AVSampleBufferDisplayLayer*);
+
+ void didBecomeReadyForMoreSamples(int trackID);
+ void destroyRenderers();
+
Vector<RefPtr<VideoTrackPrivate>> m_videoTracks;
Vector<RefPtr<AudioTrackPrivate>> m_audioTracks;
RetainPtr<AVStreamDataParser> m_parser;
RetainPtr<AVAsset> m_asset;
RetainPtr<AVSampleBufferDisplayLayer> m_displayLayer;
+ std::map<int, RetainPtr<AVSampleBufferAudioRenderer>> m_audioRenderers;
RetainPtr<NSObject> m_delegate;
MediaSourcePrivateAVFObjC* m_mediaSource;
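The renderer map is keyed by numeric track ID. std::map (hence the new <map> include) is likely chosen over WTF::HashMap because HashMap's default integer traits reserve sentinel key values (0 as the empty key), which would make small track IDs unusable without custom traits; that rationale is inferred, not stated in the commit. A sketch of the lookup pattern the .mm changes below rely on:

    // Sketch: membership test before operator[], as the diff does throughout.
    std::map<int, RetainPtr<AVSampleBufferAudioRenderer>> renderers;
    if (renderers.count(trackID))
        [renderers[trackID] flush]; // RetainPtr converts to the ObjC pointer
    // Caution: operator[] on a missing key default-constructs a null entry,
    // which is why the diff guards each lookup with count() first.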
Modified: trunk/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm (160809 => 160810)
--- trunk/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm 2013-12-19 00:52:47 UTC (rev 160809)
+++ trunk/Source/WebCore/platform/graphics/avfoundation/objc/SourceBufferPrivateAVFObjC.mm 2013-12-19 01:14:26 UTC (rev 160810)
@@ -59,6 +59,7 @@
SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
+SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeVideo, NSString *)
@@ -107,9 +108,7 @@
#pragma mark -
#pragma mark AVStreamDataParser
-@class AVStreamDataParserInternal;
-NS_CLASS_AVAILABLE(TBD, TBD)
@interface AVStreamDataParser : NSObject
- (void)setDelegate:(id)delegate;
- (void)appendStreamData:(NSData *)data;
@@ -118,6 +117,21 @@
@end
#pragma mark -
+#pragma mark AVSampleBufferAudioRenderer
+
+#if __MAC_OS_X_VERSION_MIN_REQUIRED <= 1090
+@interface AVSampleBufferAudioRenderer : NSObject
+- (NSInteger)status;
+- (NSError*)error;
+- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
+- (void)flush;
+- (BOOL)isReadyForMoreMediaData;
+- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
+- (void)stopRequestingMediaData;
+@end
+#endif
+
+#pragma mark -
#pragma mark WebAVStreamDataParserListener
@interface WebAVStreamDataParserListener : NSObject {
@@ -290,18 +304,13 @@
, m_mediaSource(parent)
, m_client(0)
, m_parsingSucceeded(true)
+ , m_enabledVideoTrackID(-1)
{
}
SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
{
- if (m_displayLayer) {
- if (m_mediaSource)
- m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
- [m_displayLayer flushAndRemoveImage];
- [m_displayLayer stopRequestingMediaData];
- m_displayLayer = nullptr;
- }
+ destroyRenderers();
}
void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
@@ -352,7 +361,7 @@
static OSStatus callProcessCodedFrameForEachSample(CMSampleBufferRef sampleBuffer, CMItemCount, void *refcon)
{
ProcessCodedFrameInfo* info = static_cast<ProcessCodedFrameInfo*>(refcon);
- return info->sourceBuffer->processCodedFrame(info->trackID, sampleBuffer, info->mediaType);
+ return info->sourceBuffer->processCodedFrame(info->trackID, sampleBuffer, info->mediaType) ? noErr : paramErr;
}
void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned flags)
@@ -401,7 +410,7 @@
notImplemented();
}
-void SourceBufferPrivateAVFObjC::removedFromMediaSource()
+void SourceBufferPrivateAVFObjC::destroyRenderers()
{
if (m_displayLayer) {
if (m_mediaSource)
@@ -411,6 +420,21 @@
m_displayLayer = nullptr;
}
+ for (auto it = m_audioRenderers.begin(), end = m_audioRenderers.end(); it != end; ++it) {
+ AVSampleBufferAudioRenderer* renderer = it->second.get();
+ if (m_mediaSource)
+ m_mediaSource->player()->removeAudioRenderer(renderer);
+ [renderer flush];
+ [renderer stopRequestingMediaData];
+ }
+
+ m_audioRenderers.clear();
+}
+
+void SourceBufferPrivateAVFObjC::removedFromMediaSource()
+{
+ destroyRenderers();
+
if (m_mediaSource)
m_mediaSource->removeSourceBuffer(this);
}
@@ -468,18 +492,38 @@
if (!m_displayLayer) {
m_displayLayer = [[getAVSampleBufferDisplayLayerClass() alloc] init];
[m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
- if (m_client)
- m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(this, AtomicString::number(trackID));
+ didBecomeReadyForMoreSamples(trackID);
}];
- if (m_mediaSource)
- m_mediaSource->player()->addDisplayLayer(m_displayLayer.get());
}
+ if (m_mediaSource)
+ m_mediaSource->player()->addDisplayLayer(m_displayLayer.get());
}
}
-void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC*)
+void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
{
- // No-op.
+ int trackID = track->trackID();
+
+ if (!track->enabled()) {
+ AVSampleBufferAudioRenderer* renderer = m_audioRenderers[trackID].get();
+ [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
+ if (m_mediaSource)
+ m_mediaSource->player()->removeAudioRenderer(renderer);
+ } else {
+ [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
+ AVSampleBufferAudioRenderer* renderer;
+ if (!m_audioRenderers.count(trackID)) {
+ renderer = [[getAVSampleBufferAudioRendererClass() alloc] init];
+ [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
+ didBecomeReadyForMoreSamples(trackID);
+ }];
+ m_audioRenderers[trackID] = renderer;
+ } else
+ renderer = m_audioRenderers[trackID].get();
+
+ if (m_mediaSource)
+ m_mediaSource->player()->addAudioRenderer(renderer);
+ }
}
static RetainPtr<CMSampleBufferRef> createNonDisplayingCopy(CMSampleBufferRef sampleBuffer)
@@ -498,14 +542,20 @@
return adoptCF(newSampleBuffer);
}
-void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AtomicString trackID)
+void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AtomicString trackIDString)
{
- if (trackID.toInt() != m_enabledVideoTrackID)
- return;
+ int trackID = trackIDString.toInt();
+ LOG(Media, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) samples: %d samples, trackId: %d", this, mediaSamples.size(), trackID);
- LOG(Media, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) samples: %d samples, trackId: %d", this, mediaSamples.size(), trackID.toInt());
+ if (trackID == m_enabledVideoTrackID)
+ flushAndEnqueueNonDisplayingSamples(mediaSamples, m_displayLayer.get());
+ else if (m_audioRenderers.count(trackID))
+ flushAndEnqueueNonDisplayingSamples(mediaSamples, m_audioRenderers[trackID].get());
+}
- [m_displayLayer flush];
+void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferAudioRenderer* renderer)
+{
+ [renderer flush];
for (auto it = mediaSamples.begin(), end = mediaSamples.end(); it != end; ++it) {
RefPtr<MediaSample>& mediaSample = *it;
@@ -515,16 +565,33 @@
RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
- [m_displayLayer enqueueSampleBuffer:sampleBuffer.get()];
+ [renderer enqueueSampleBuffer:sampleBuffer.get()];
}
+}
+void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferDisplayLayer* layer)
+{
+ [layer flush];
+
+ for (auto it = mediaSamples.begin(), end = mediaSamples.end(); it != end; ++it) {
+ RefPtr<MediaSample>& mediaSample = *it;
+
+ PlatformSample platformSample = mediaSample->platformSample();
+ ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);
+
+ RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
+
+ [layer enqueueSampleBuffer:sampleBuffer.get()];
+ }
+
if (m_mediaSource)
m_mediaSource->player()->setHasAvailableVideoFrame(false);
}
-void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackID)
+void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackIDString)
{
- if (trackID.toInt() != m_enabledVideoTrackID)
+ int trackID = trackIDString.toInt();
+ if (trackID != m_enabledVideoTrackID && !m_audioRenderers.count(trackID))
return;
RefPtr<MediaSample> mediaSample = prpMediaSample;
@@ -533,15 +600,25 @@
if (platformSample.type != PlatformSample::CMSampleBufferType)
return;
- [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
- if (m_mediaSource)
- m_mediaSource->player()->setHasAvailableVideoFrame(true);
+ if (trackID == m_enabledVideoTrackID) {
+ [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
+ if (m_mediaSource)
+ m_mediaSource->player()->setHasAvailableVideoFrame(true);
+ } else
+ [m_audioRenderers[trackID] enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
}
-bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackID)
+bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackIDString)
{
- UNUSED_PARAM(trackID);
- return [m_displayLayer isReadyForMoreMediaData];
+ int trackID = trackIDString.toInt();
+ if (trackID == m_enabledVideoTrackID)
+ return [m_displayLayer isReadyForMoreMediaData];
+ else if (m_audioRenderers.count(trackID))
+ return [m_audioRenderers[trackID] isReadyForMoreMediaData];
+ else
+ ASSERT_NOT_REACHED();
+
+ return false;
}
void SourceBufferPrivateAVFObjC::setActive(bool isActive)
@@ -563,6 +640,36 @@
m_client->sourceBufferPrivateSeekToTime(this, time);
}
+void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
+{
+ if (trackID == m_enabledVideoTrackID)
+ [m_displayLayer stopRequestingMediaData];
+ else if (m_audioRenderers.count(trackID))
+ [m_audioRenderers[trackID] stopRequestingMediaData];
+ else {
+ ASSERT_NOT_REACHED();
+ return;
+ }
+
+ if (m_client)
+ m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(this, AtomicString::number(trackID));
+}
+
+void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(AtomicString trackIDString)
+{
+ int trackID = trackIDString.toInt();
+ if (trackID == m_enabledVideoTrackID) {
+ [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
+ didBecomeReadyForMoreSamples(trackID);
+ }];
+ } else if (m_audioRenderers.count(trackID)) {
+ [m_audioRenderers[trackID] requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
+ didBecomeReadyForMoreSamples(trackID);
+ }];
+ } else
+ ASSERT_NOT_REACHED();
+}
+
}
#endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
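AVSampleBufferDisplayLayer and AVSampleBufferAudioRenderer share the pull model this patch builds on: the client asks to be called back when the renderer can accept data, enqueues until the renderer stops being ready, and cancels the callback when it has nothing left to feed. A minimal sketch of that handshake in isolation (nextSampleBuffer() is a hypothetical supplier; the diff routes the same handshake through didBecomeReadyForMoreSamples() and SourceBufferPrivateClient instead):

    [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
        while ([renderer isReadyForMoreMediaData]) {
            RetainPtr<CMSampleBufferRef> buffer = nextSampleBuffer(); // hypothetical supplier
            if (!buffer) {
                [renderer stopRequestingMediaData]; // re-arm later with another request
                return;
            }
            [renderer enqueueSampleBuffer:buffer.get()];
        }
    }];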