Modified: trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.cpp (237818 => 237819)
--- trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.cpp 2018-11-05 19:12:38 UTC (rev 237818)
+++ trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.cpp 2018-11-05 19:15:14 UTC (rev 237819)
@@ -69,12 +69,13 @@
return rtc::scoped_refptr<webrtc::VideoFrameBuffer>(new GStreamerVideoFrameLibWebRTC(sample, info));
}
-std::unique_ptr<webrtc::VideoFrame> LibWebRTCVideoFrameFromGStreamerSample(GstSample* sample, webrtc::VideoRotation rotation)
+std::unique_ptr<webrtc::VideoFrame> LibWebRTCVideoFrameFromGStreamerSample(GstSample* sample, webrtc::VideoRotation rotation,
+ int64_t timestamp, int64_t renderTimeMs)
{
auto frameBuffer(GStreamerVideoFrameLibWebRTC::create(sample));
- auto buffer = gst_sample_get_buffer(sample);
- return std::unique_ptr<webrtc::VideoFrame>(new webrtc::VideoFrame(frameBuffer, GST_BUFFER_DTS(buffer), GST_BUFFER_PTS(buffer), rotation));
+ return std::unique_ptr<webrtc::VideoFrame>(
+ new webrtc::VideoFrame(frameBuffer, timestamp, renderTimeMs, rotation));
}
webrtc::VideoFrameBuffer::Type GStreamerVideoFrameLibWebRTC::type() const
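A sketch of a call site under the new signature, for orientation: the two WebRTC timestamps now travel alongside the sample instead of being read back from the buffer's DTS/PTS. Here `sample`, `rtpTimestamp` and `renderTimeMs` are hypothetical locals standing in for whatever per-frame bookkeeping the caller maintains (the decoder below keeps them in a PTS-keyed map):

    // Hypothetical caller: supply the timestamps tracked for this frame.
    auto frame = LibWebRTCVideoFrameFromGStreamerSample(
        sample, webrtc::kVideoRotation_0, rtpTimestamp, renderTimeMs);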
Modified: trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.h (237818 => 237819)
--- trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.h 2018-11-05 19:12:38 UTC (rev 237818)
+++ trunk/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.h 2018-11-05 19:15:14 UTC (rev 237819)
@@ -32,7 +32,7 @@
namespace WebCore {
const GRefPtr<GstSample> GStreamerSampleFromLibWebRTCVideoFrame(const webrtc::VideoFrame&);
-std::unique_ptr<webrtc::VideoFrame> LibWebRTCVideoFrameFromGStreamerSample(GstSample*, webrtc::VideoRotation);
+std::unique_ptr<webrtc::VideoFrame> LibWebRTCVideoFrameFromGStreamerSample(GstSample*, webrtc::VideoRotation, int64_t timestamp, int64_t renderTimeMs);
class GStreamerVideoFrameLibWebRTC : public rtc::RefCountedObject<webrtc::VideoFrameBuffer> {
public:
Modified: trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp (237818 => 237819)
--- trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp 2018-11-05 19:12:38 UTC (rev 237818)
+++ trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp 2018-11-05 19:15:14 UTC (rev 237819)
@@ -44,6 +44,11 @@
namespace WebCore {
+typedef struct {
+ uint64_t timestamp;
+ int64_t renderTimeMs;
+} InputTimestamps;
+
class GStreamerVideoDecoder : public webrtc::VideoDecoder {
public:
GStreamerVideoDecoder()
@@ -167,7 +172,8 @@
GST_BUFFER_PTS(buffer.get()) = (static_cast<guint64>(renderTimeMs) * GST_MSECOND) - m_firstBufferPts;
{
auto locker = holdLock(m_bufferMapLock);
- m_dtsPtsMap[GST_BUFFER_PTS(buffer.get())] = inputImage.Timestamp();
+ InputTimestamps timestamps = {inputImage.Timestamp(), renderTimeMs};
+ m_dtsPtsMap[GST_BUFFER_PTS(buffer.get())] = timestamps;
}
GST_LOG_OBJECT(pipeline(), "%ld Decoding: %" GST_PTR_FORMAT, renderTimeMs, buffer.get());
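The map key is the PTS computed just above from renderTimeMs, so recovering the entry later relies on the decoder copying the input PTS onto its output buffer unchanged. A condensed sketch of that assumption (illustrative only; `inputBuffer` and `outputBuffer` are hypothetical):

    // Input side: PTS is derived from renderTimeMs and doubles as the key.
    GstClockTime key = GST_BUFFER_PTS(inputBuffer);
    // Output side: the lookup in the appsink callback only works if the
    // decoder element preserved the PTS across decoding.
    ASSERT(GST_BUFFER_PTS(outputBuffer) == key);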
@@ -228,14 +234,16 @@
auto sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
auto buffer = gst_sample_get_buffer(sample);
- {
- auto locker = holdLock(m_bufferMapLock);
- // Make sure that the frame.timestamp == previsouly input_frame._timeStamp
- // as it is required by the VideoDecoder baseclass.
- GST_BUFFER_DTS(buffer) = m_dtsPtsMap[GST_BUFFER_PTS(buffer)];
- m_dtsPtsMap.erase(GST_BUFFER_PTS(buffer));
- }
- auto frame(LibWebRTCVideoFrameFromGStreamerSample(sample, webrtc::kVideoRotation_0));
+ InputTimestamps timestamps;
+ {
+ auto locker = holdLock(m_bufferMapLock);
+ // Make sure that frame.timestamp() matches the _timeStamp of the previously
+ // input frame, as required by the VideoDecoder base class.
+ timestamps = m_dtsPtsMap[GST_BUFFER_PTS(buffer)];
+ m_dtsPtsMap.erase(GST_BUFFER_PTS(buffer));
+ }
+
+ auto frame(LibWebRTCVideoFrameFromGStreamerSample(sample, webrtc::kVideoRotation_0,
+ timestamps.timestamp, timestamps.renderTimeMs));
+
GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE;
GST_LOG_OBJECT(pipeline(), "Output decoded frame! %d -> %" GST_PTR_FORMAT,
frame->timestamp(), buffer);
@@ -267,7 +275,7 @@
webrtc::DecodedImageCallback* m_imageReadyCb;
Lock m_bufferMapLock;
- StdMap<GstClockTime, GstClockTime> m_dtsPtsMap;
+ StdMap<GstClockTime, InputTimestamps> m_dtsPtsMap;
GstClockTime m_firstBufferPts;
GstClockTime m_firstBufferDts;
};
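Taken together, the decoder half of the change threads the WebRTC timestamps through a PTS-keyed table instead of smuggling the RTP timestamp through GST_BUFFER_DTS as the removed code did, erasing each entry as it is consumed so the table only tracks frames in flight. A simplified model of the output path (locking elided; names match the source):

    // appsink callback, condensed: recover what Decode() recorded, drop the
    // entry, and hand both values to the frame factory.
    InputTimestamps timestamps = m_dtsPtsMap[GST_BUFFER_PTS(buffer)];
    m_dtsPtsMap.erase(GST_BUFFER_PTS(buffer));
    auto frame = LibWebRTCVideoFrameFromGStreamerSample(sample,
        webrtc::kVideoRotation_0, timestamps.timestamp, timestamps.renderTimeMs);
    GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE; // DTS no longer carries the RTP timestamp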
Modified: trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp (237818 => 237819)
--- trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp 2018-11-05 19:12:38 UTC (rev 237818)
+++ trunk/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp 2018-11-05 19:15:14 UTC (rev 237819)
@@ -40,9 +40,10 @@
#undef GST_USE_UNSTABLE_API
#include <gst/pbutils/encoding-profile.h>
#include <gst/video/video.h>
+#include <wtf/HashMap.h>
+#include <wtf/Lock.h>
+#include <wtf/StdMap.h>
-#include <mutex>
-
// Required for unified builds
#ifdef GST_CAT_DEFAULT
#undef GST_CAT_DEFAULT
@@ -55,10 +56,11 @@
namespace WebCore {
-typedef void (*BitrateSetter)(GstElement* encoder, uint32_t bitrate);
-static GRefPtr<GRegex> targetBitrateBitPerSec;
-static GRefPtr<GRegex> bitrateBitPerSec;
-static GRefPtr<GRegex> bitrateKBitPerSec;
+typedef struct {
+ uint64_t rtpTimestamp;
+ int64_t captureTimeMs;
+ webrtc::CodecSpecificInfo codecInfo;
+} FrameData;
class GStreamerVideoEncoder : public webrtc::VideoEncoder {
public:
@@ -65,11 +67,13 @@
GStreamerVideoEncoder(const webrtc::SdpVideoFormat&)
: m_firstFramePts(GST_CLOCK_TIME_NONE)
, m_restrictionCaps(adoptGRef(gst_caps_new_empty_simple("video/x-raw")))
+ , m_adapter(adoptGRef(gst_adapter_new()))
{
}
GStreamerVideoEncoder()
: m_firstFramePts(GST_CLOCK_TIME_NONE)
, m_restrictionCaps(adoptGRef(gst_caps_new_empty_simple("video/x-raw")))
+ , m_adapter(adoptGRef(gst_adapter_new()))
{
}
@@ -187,7 +191,7 @@
}
int32_t Encode(const webrtc::VideoFrame& frame,
- const webrtc::CodecSpecificInfo*,
+ const webrtc::CodecSpecificInfo* codecInfo,
const std::vector<webrtc::FrameType>* frameTypes) final
{
if (!m_imageReadyCb) {
@@ -211,11 +215,19 @@
gst_pad_set_offset(pad.get(), -m_firstFramePts);
}
+ webrtc::CodecSpecificInfo localCodecInfo;
+ FrameData frameData = { frame.timestamp(), frame.render_time_ms(), codecInfo ? *codecInfo : localCodecInfo };
+ {
+ auto locker = holdLock(m_bufferMapLock);
+ m_framesData.append(frameData);
+ }
+
for (auto frame_type : *frameTypes) {
if (frame_type == webrtc::kVideoFrameKey) {
auto pad = adoptGRef(gst_element_get_static_pad(m_src, "src"));
auto forceKeyUnit = gst_video_event_new_downstream_force_key_unit(GST_CLOCK_TIME_NONE,
GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, FALSE, 1);
+ GST_INFO_OBJECT(m_pipeline.get(), "Requesting KEYFRAME!");
if (!gst_pad_push_event(pad.get(), forceKeyUnit))
GST_WARNING_OBJECT(pipeline(), "Could not send ForceKeyUnit event");
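Encode() now queues one FrameData per input frame under m_bufferMapLock, and the appsink callback consumes them in FIFO order, which assumes the encoder emits output buffers in input order. The keyframe request itself is a standard GStreamer downstream force-key-unit event; a minimal sketch of that call, with the arguments spelled out (`srcPad` is a hypothetical stand-in for the pad fetched above):

    // Three GST_CLOCK_TIME_NONE values leave the target times unspecified
    // (keyframe as soon as possible); FALSE declines full stream headers;
    // the final argument numbers the request.
    GstEvent* forceKeyUnit = gst_video_event_new_downstream_force_key_unit(
        GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, FALSE, 1);
    if (!gst_pad_push_event(srcPad, forceKeyUnit))
        GST_WARNING("Could not send ForceKeyUnit event");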
@@ -240,6 +252,29 @@
auto buffer = gst_sample_get_buffer(sample.get());
auto caps = gst_sample_get_caps(sample.get());
+ webrtc::CodecSpecificInfo localCodecInfo;
+ FrameData frameData = { 0, 0, localCodecInfo };
+ {
+ auto locker = holdLock(m_bufferMapLock);
+ if (m_framesData.isEmpty()) {
+ gst_adapter_push(m_adapter.get(), gst_buffer_ref(buffer));
+
+ return GST_FLOW_OK;
+ }
+
+ if (gst_adapter_available(m_adapter.get()) > 0) {
+ guint flags = GST_BUFFER_FLAGS(buffer);
+
+ GST_INFO_OBJECT(m_pipeline.get(), "Got more buffers than pushed frames, trying to deal with it.");
+ gst_adapter_push(m_adapter.get(), gst_buffer_ref(buffer));
+
+ buffer = gst_adapter_take_buffer(m_adapter.get(), gst_adapter_available(m_adapter.get()));
+ GST_BUFFER_FLAGS(buffer) = flags;
+ }
+ frameData = m_framesData[0];
+ m_framesData.remove(static_cast<size_t>(0));
+ }
+
webrtc::RTPFragmentationHeader fragmentationInfo;
Fragmentize(&m_encodedFrame, &m_encodedImageBuffer, &m_encodedImageBufferSize, buffer, &fragmentationInfo);
if (!m_encodedFrame._size)
@@ -251,15 +286,16 @@
nullptr);
m_encodedFrame._frameType = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT) ? webrtc::kVideoFrameDelta : webrtc::kVideoFrameKey;
- m_encodedFrame._completeFrame = true;
- m_encodedFrame.capture_time_ms_ = GST_TIME_AS_MSECONDS(GST_BUFFER_PTS(buffer));
- m_encodedFrame.SetTimestamp(GST_TIME_AS_MSECONDS(GST_BUFFER_DTS(buffer)));
- GST_LOG_OBJECT(m_pipeline.get(), "Got buffer TS: %" GST_TIME_FORMAT, GST_TIME_ARGS(GST_BUFFER_PTS(buffer)));
+ m_encodedFrame._completeFrame = false;
+ m_encodedFrame.capture_time_ms_ = frameData.captureTimeMs;
+ m_encodedFrame.SetTimestamp(frameData.rtpTimestamp);
- webrtc::CodecSpecificInfo codecSpecifiInfos;
- PopulateCodecSpecific(&codecSpecifiInfos, buffer);
+ GST_LOG_OBJECT(m_pipeline.get(), "Got buffer capture_time_ms: %" G_GINT64_FORMAT " _timestamp: %u",
+ m_encodedFrame.capture_time_ms_, m_encodedFrame.Timestamp());
- webrtc::EncodedImageCallback::Result result = m_imageReadyCb->OnEncodedImage(m_encodedFrame, &codecSpecifiInfos, &fragmentationInfo);
+ PopulateCodecSpecific(&frameData.codecInfo, buffer);
+
+ webrtc::EncodedImageCallback::Result result = m_imageReadyCb->OnEncodedImage(m_encodedFrame, &frameData.codecInfo, &fragmentationInfo);
if (result.error != webrtc::EncodedImageCallback::Result::OK)
GST_ERROR_OBJECT(m_pipeline.get(), "Encode callback failed: %d", result.error);
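The GstAdapter above covers encoders whose output is not one-buffer-per-Encode(): a buffer arriving before any FrameData has been queued is parked, and once frame data is available all parked bytes are merged with the current buffer into one contiguous buffer whose flags are restored. A sketch of just the adapter calls involved, assuming `adapter` and `buffer` as in the diff:

    // Park output that cannot yet be attributed to an input frame; the
    // adapter consumes the extra ref taken here.
    gst_adapter_push(adapter, gst_buffer_ref(buffer));
    // Later, drain everything parked so far into a single buffer.
    gsize parked = gst_adapter_available(adapter);
    GstBuffer* merged = gst_adapter_take_buffer(adapter, parked);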
@@ -294,11 +330,6 @@
}
}
- virtual const gchar* ProfileName()
- {
- return nullptr;
- }
-
virtual const gchar* Caps()
{
return nullptr;
@@ -372,6 +403,10 @@
webrtc::EncodedImage m_encodedFrame;
std::unique_ptr<uint8_t[]> m_encodedImageBuffer;
size_t m_encodedImageBufferSize;
+
+ Lock m_bufferMapLock;
+ GRefPtr<GstAdapter> m_adapter;
+ Vector<FrameData> m_framesData;
};
class H264Encoder : public GStreamerVideoEncoder {
@@ -474,7 +509,6 @@
const gchar* Caps() final { return "video/x-vp8"; }
const gchar* Name() final { return cricket::kVp8CodecName; }
webrtc::VideoCodecType CodecType() final { return webrtc::kVideoCodecVP8; }
- virtual const gchar* ProfileName() { return "Profile Realtime"; }
void PopulateCodecSpecific(webrtc::CodecSpecificInfo* codecSpecifiInfos, GstBuffer* buffer) final
{