Hi all!

I just finished my work on a working H264VideoStreamFramer for files
containing plain H.264. I'd like to post it here; maybe it can be
integrated somehow into the main tree.

Best regards,

Robert



----------------------------------------
Bluetechnix Mechatronische Systeme GmbH
Robert Klotzner
Waidhausenstr. 3/19
1140 Wien
AUSTRIA
Development Office (Delivery Address):
Lainzerstr. 162/32)
1130 Wien
AUSTRIA
Tel: +43 (1) 914 20 91 x DW3)
Fax: +43 (1) 914 20 91 x 99
Email: robert.klotz...@bluetechnix.at)
Website: www.bluetechnix.com
---------------------------------------

--- /dev/null
+++ b/liveMedia/MyH264VideoStreamFramer.cpp
@@ -0,0 +1,153 @@
+/**********
+		   This library is free software; you can redistribute it and/or modify it under
+		   the terms of the GNU Lesser General Public License as published by the
+		   Free Software Foundation; either version 2.1 of the License, or (at your
+		   option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
+
+		   This library is distributed in the hope that it will be useful, but WITHOUT
+		   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+		   FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
+		   more details.
+
+		   You should have received a copy of the GNU Lesser General Public License
+		   along with this library; if not, write to the Free Software Foundation, Inc.,
+		   51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+**********/
+// "liveMedia"
+// Copyright (c) 1996-2008 Live Networks, Inc.  All rights reserved.
+// Author: Robert Klotzner <robert.klotz...@bluetechnix.at>
+// Bluetechnix Mechatronische Systeme www.bluetechnix.at
+// A concrete H264VideoStreamFramer for files containing a raw (Annex-B)
+// H.264 elementary stream.  Any source that feeds into a "H264VideoRTPSink"
+// must be of this class; unlike the abstract base class, this class
+// implements the "currentNALUnitEndsAccessUnit()" virtual function itself.
+// Implementation
+
+#include "MyH264VideoStreamFramer.hh"
+
+
+
+// Construct a framer that parses a raw (Annex-B) H.264 byte stream read
+// from "inputSource".  The parse state starts out "buffer empty".
+// NOTE(review): fEnd_pos is initialized to fInputBuffer-1, a pointer one
+// element BEFORE the array -- forming that pointer is technically undefined
+// behavior in C++; fInputBuffer itself would serve the same sentinel purpose.
+MyH264VideoStreamFramer::MyH264VideoStreamFramer(UsageEnvironment& env, FramedSource* inputSource)
+	: H264VideoStreamFramer(env, inputSource), fInput_pos(fInputBuffer), fEnd_pos(fInputBuffer-1), fSavedFrameSize(0), fCurrentNalUnitEndsAccessUnit(false) {
+}
+
+// Destructor: nothing to release beyond what the base class handles
+// (fInputBuffer is an in-object array).
+MyH264VideoStreamFramer::~MyH264VideoStreamFramer() {
+}
+
+// Type marker queried by H264VideoRTPSink to verify that its upstream
+// source is an H.264 framer.
+Boolean MyH264VideoStreamFramer::isH264VideoStreamFramer() const {
+	return True;
+}
+  
+// Returns the flag computed in afterGettingFrame1() for the most recently
+// delivered NAL unit: True when that unit appears to end an access unit.
+Boolean MyH264VideoStreamFramer::currentNALUnitEndsAccessUnit() {
+	return fCurrentNalUnitEndsAccessUnit;
+}
+
+// Deliver the next NAL unit downstream.  If the internal input buffer has
+// been fully consumed (fInput_pos >= fEnd_pos), request a fresh chunk from
+// the upstream source and continue asynchronously in afterGettingFrame();
+// otherwise parse the data already buffered by calling the completion
+// handler directly with the number of unconsumed bytes.
+void MyH264VideoStreamFramer::doGetNextFrame()
+{
+	if(fInput_pos>=fEnd_pos) {
+		fInputSource->getNextFrame(fInputBuffer, MY_H264_BUFFER_SIZE, afterGettingFrame, this, handleClosure, this);
+	}
+	else
+		afterGettingFrame1(fEnd_pos-fInput_pos, fNumTruncatedBytes, fPresentationTime, fDurationInMicroseconds);
+}
+
+
+
+// Static trampoline handed to FramedSource::getNextFrame(); forwards the
+// completed read to the instance method afterGettingFrame1().
+void MyH264VideoStreamFramer::afterGettingFrame(void* clientData, unsigned frameSize,
+												unsigned numTruncatedBytes,
+												struct timeval presentationTime,
+												unsigned durationInMicroseconds) {
+	static_cast<MyH264VideoStreamFramer*>(clientData)->afterGettingFrame1(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds);
+}
+
+// Pull one stream byte out of *from (advancing it), transparently stripping
+// H.264 Annex-B start codes.  Zero bytes are first counted, then replayed
+// one per call, so that a start code can be recognized before its leading
+// zeros have been emitted.  Return value:
+//   >= 0 : the next payload byte
+//   -1   : end of buffer reached (after any pending zeros were replayed)
+//   -2   : a start code (00 00 01 / 00 00 00 01) was consumed, i.e. a
+//          NAL-unit boundary was crossed
+// NOTE(review): "zeros" is a function-local static, so this parser carries
+// hidden global state.  It is not reentrant and will corrupt its zero count
+// if more than one framer instance is active in the same process; the
+// counter should be a member of the framer object instead.
+static int get_byte_helper(uint8_t** from, uint8_t* end) {
+	static int zeros=0;
+
+	if(!zeros) { //No zeros found yet- search them ;-)
+		for(;(*from)<end && **from==0x0; zeros++, (*from)++) 
+			;
+	}
+	if(*from>=end) { //If we are at the end- return all zeros so far and then -1
+		if(zeros>0) {
+			zeros--;
+			return 0;
+		}
+		return -1;
+	}
+	if(**from==0x01 && zeros>=2) { //Start sequence found
+		if(zeros>3) { // More than 3 zeros: the surplus are payload, replay them first.
+			zeros--;
+			return 0;
+		}
+		zeros=0; //Ignore start sequence
+		(*from)++; //Ignore start sequence
+		return -2;
+	}
+	// No starting sequence found
+	if(zeros>0) {
+		zeros--;
+		return 0;
+	}
+	return *(*from)++;
+}
+	
+
+
+
+// Parse the buffered input into the next NAL unit, copying its payload
+// bytes into fTo, then hand the unit to the sink via afterGetting().
+// Invoked either directly from doGetNextFrame() (buffered data left over)
+// or as the completion of an upstream read.
+// Fixes vs. the original posting:
+//   * fNumTruncatedBytes was computed as "frameSize-fMaxSize > 0 ? ..." --
+//     with unsigned operands the subtraction wraps, so the condition is
+//     true for ANY frameSize != fMaxSize; now an explicit comparison.
+//   * The NAL-unit-type mask was 0x0f; nal_unit_type is the LOW 5 BITS of
+//     the NAL header, so the mask must be 0x1f (0x0f also matched reserved
+//     type 25 as an access-unit delimiter).
+//   * Uses the liveMedia "True"/"False" constants consistently instead of
+//     mixing in bool literals; fixed "spcified" typo in the log message.
+void MyH264VideoStreamFramer::afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes,
+												 struct timeval presentationTime, unsigned durationInMicroseconds) {
+	fEnd_pos=fInput_pos+frameSize;
+	if (numTruncatedBytes > 0) {
+		envir() << "MyH264VideoStreamFramer::afterGettingFrame1(): The input frame data was too large for our specified maximum payload size ("
+				<< MY_H264_BUFFER_SIZE << ").  "
+				<< numTruncatedBytes << " bytes of trailing data was dropped!\n";
+	}
+	int buf;
+
+	// Never copy more than the sink can accept in one delivery (fMaxSize).
+	uint8_t *end=fInput_pos+(frameSize < fMaxSize ? frameSize : fMaxSize);
+
+	fFrameSize=fSavedFrameSize; // Non-zero only if the previous read left a unit unfinished.
+	do {
+		// Copy payload bytes until a start code (-2) or the buffer end (-1).
+		while((buf=get_byte_helper(&fInput_pos, end))>=0) {
+			*fTo++=buf;
+			fFrameSize++;
+		}
+	}
+	while(fFrameSize==0); // Skip empty units (e.g. a leading start code).
+	// A NAL unit of type 9 (access unit delimiter) ends an access unit.
+	// NOTE(review): heuristic -- streams without AUDs never set this flag.
+	if(fSavedFrameSize==0) { // We are at the beginning of a frame.
+		fCurrentNalUnitEndsAccessUnit = ((*(fTo-fFrameSize) & 0x1f) == 0x09) ? True : False;
+	}
+	if(fInput_pos>=end) {
+		if(end<fEnd_pos) { // We hit fMaxSize before finding the next start code.
+			// end < fEnd_pos guarantees frameSize > fMaxSize, but compare
+			// explicitly so the unsigned arithmetic cannot wrap.
+			fNumTruncatedBytes = frameSize > fMaxSize ? frameSize - fMaxSize : 0;
+		}
+		else { 
+			fInput_pos=fInputBuffer; // Reached buffer end- reset.
+			fEnd_pos=fInputBuffer-1; // Prepare for a new read from upstream.
+			fSavedFrameSize=fFrameSize; // Remember how many bytes were already delivered.
+			doGetNextFrame(); // Fetch the remainder of this unit.
+			return;
+		}
+	}
+	// NOTE(review): hard-coded 30 ms per delivered unit; should be derived
+	// from the stream's actual frame rate.
+	int frameDuration = 30;
+
+	// Compute "fPresentationTime" 
+	// NOTE(review): "fFrameSize == 5" as a first-frame test looks fragile --
+	// presumably it matches the size of the stream's initial NAL unit;
+	// confirm and replace with an explicit first-time flag.
+	if (fFrameSize == 5) // first frame
+		gettimeofday(&fPresentationTime, NULL);
+	else {
+		fPresentationTime.tv_usec += (long) frameDuration*1000;
+		fPresentationTime.tv_sec+=fPresentationTime.tv_usec/1000000;
+		fPresentationTime.tv_usec%=1000000;
+	}
+
+	// Compute "fDurationInMicroseconds" 
+	fDurationInMicroseconds = (unsigned int) frameDuration*1000;
+	fSavedFrameSize=0;  // Prepare for the next unit.
+	afterGetting(this); // Hand the completed NAL unit to the sink.
+}
+
+
+	
diff --git a/liveMedia/include/MyH264VideoStreamFramer.hh b/liveMedia/include/MyH264VideoStreamFramer.hh
new file mode 100644
index 0000000..c422165
--- /dev/null
+++ b/liveMedia/include/MyH264VideoStreamFramer.hh
@@ -0,0 +1,69 @@
+/**********
+		   This library is free software; you can redistribute it and/or modify it under
+		   the terms of the GNU Lesser General Public License as published by the
+		   Free Software Foundation; either version 2.1 of the License, or (at your
+		   option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
+
+		   This library is distributed in the hope that it will be useful, but WITHOUT
+		   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+		   FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
+		   more details.
+
+		   You should have received a copy of the GNU Lesser General Public License
+		   along with this library; if not, write to the Free Software Foundation, Inc.,
+		   51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+**********/
+// "liveMedia"
+// Copyright (c) 1996-2008 Live Networks, Inc.  All rights reserved.
+// Author: Robert Klotzner <robert.klotz...@bluetechnix.at>
+// Bluetechnix Mechatronische Systeme www.bluetechnix.at
+// A concrete H264VideoStreamFramer for raw (Annex-B) H.264 elementary
+// stream files.  Any source that feeds into a "H264VideoRTPSink" must be
+// of this class; this class implements the
+// "currentNALUnitEndsAccessUnit()" virtual function itself.
+// C++ header
+
+#ifndef _MY_H264_VIDEO_STREAM_FRAMER_HH
+#define _MY_H264_VIDEO_STREAM_FRAMER_HH
+
+#include "H264VideoStreamFramer.hh"
+
+#define MY_H264_BUFFER_SIZE 150000
+// Framer that splits a raw (Annex-B) H.264 elementary stream into
+// individual NAL units for delivery to an H264VideoRTPSink.
+class MyH264VideoStreamFramer : public H264VideoStreamFramer {
+public:
+	virtual Boolean currentNALUnitEndsAccessUnit();
+	// subclasses must define this function.  It returns True iff the
+	// most recently received NAL unit ends a video 'access unit' (i.e., 'frame')
+	virtual void doGetNextFrame();
+	static MyH264VideoStreamFramer* createNew(UsageEnvironment& env, FramedSource* videoES) {
+		return new MyH264VideoStreamFramer(env, videoES);
+	}
+
+	// NOTE(review): the two "continueReadProcessing" declarations below are
+	// not defined in the accompanying .cpp -- implement or remove them.
+	static void continueReadProcessing(void* clientData,
+									   unsigned char* ptr, unsigned size,
+									   struct timeval presentationTime);
+	void continueReadProcessing();
+
+
+protected:
+	MyH264VideoStreamFramer(UsageEnvironment& env, FramedSource* inputSource);
+//	virtual void restoreSavedParserState();
+	virtual ~MyH264VideoStreamFramer();
+	// Static trampoline + instance handler invoked when the upstream source
+	// delivers a new chunk of stream data.
+	static void afterGettingFrame(void* clientData, unsigned frameSize,
+				  unsigned numTruncatedBytes,
+				  struct timeval presentationTime,
+				  unsigned durationInMicroseconds);
+	// NOTE(review): onCloseCleanup/onCloseCleanup1 are also declared but
+	// not defined in the .cpp.
+	static void onCloseCleanup(void* clientData);
+	void onCloseCleanup1();
+	void afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes, struct timeval presentationTime, unsigned durationInMicroseconds);
+
+private:
+	// redefined virtual functions:
+	virtual Boolean isH264VideoStreamFramer() const;
+	// Raw input buffer, with read cursor (fInput_pos) and one-past-the-end
+	// marker (fEnd_pos) for the bytes currently buffered.
+		uint8_t fInputBuffer[MY_H264_BUFFER_SIZE];
+	uint8_t* fInput_pos;
+	uint8_t* fEnd_pos;
+	int fSavedFrameSize;	// Bytes of a partially-delivered NAL unit carried across reads.
+	Boolean fCurrentNalUnitEndsAccessUnit;	// Cached result for currentNALUnitEndsAccessUnit().
+};
+
+#endif
diff --git a/testProgs/Makefile.tail b/testProgs/Makefile.tail
index 8707646..1157dde 100644
--- a/testProgs/Makefile.tail
+++ b/testProgs/Makefile.tail
@@ -31,6 +31,7 @@ MPEG_1OR2_VIDEO_RECEIVER_OBJS = testMPEG1or2VideoReceiver.$(OBJ)
 MPEG_1OR2_AUDIO_VIDEO_STREAMER_OBJS = testMPEG1or2AudioVideoStreamer.$(OBJ)
 MPEG2_TRANSPORT_STREAMER_OBJS = testMPEG2TransportStreamer.$(OBJ)
 MPEG4_VIDEO_STREAMER_OBJS = testMPEG4VideoStreamer.$(OBJ)
+H264_VIDEO_STREAMER_OBJS = testH264VideoStreamer.$(OBJ)
 WAV_AUDIO_STREAMER_OBJS = testWAVAudioStreamer.$(OBJ)
 AMR_AUDIO_STREAMER_OBJS	= testAMRAudioStreamer.$(OBJ)
 ON_DEMAND_RTSP_SERVER_OBJS	= testOnDemandRTSPServer.$(OBJ)
@@ -80,6 +81,8 @@ testMPEG2TransportStreamer$(EXE):	$(MPEG2_TRANSPORT_STREAMER_OBJS) $(LOCAL_LIBS)
 	$(LINK)$@ $(CONSOLE_LINK_OPTS) $(MPEG2_TRANSPORT_STREAMER_OBJS) $(LIBS)
 testMPEG4VideoStreamer$(EXE):	$(MPEG4_VIDEO_STREAMER_OBJS) $(LOCAL_LIBS)
 	$(LINK)$@ $(CONSOLE_LINK_OPTS) $(MPEG4_VIDEO_STREAMER_OBJS) $(LIBS)
+testH264VideoStreamer$(EXE):	$(H264_VIDEO_STREAMER_OBJS) $(LOCAL_LIBS)
+	$(LINK)$@ $(CONSOLE_LINK_OPTS) $(H264_VIDEO_STREAMER_OBJS) $(LIBS)
 testWAVAudioStreamer$(EXE):	$(WAV_AUDIO_STREAMER_OBJS) $(LOCAL_LIBS)
 	$(LINK)$@ $(CONSOLE_LINK_OPTS) $(WAV_AUDIO_STREAMER_OBJS) $(LIBS)
 testAMRAudioStreamer$(EXE):	$(AMR_AUDIO_STREAMER_OBJS) $(LOCAL_LIBS)
diff --git a/testProgs/testH264VideoStreamer.cpp b/testProgs/testH264VideoStreamer.cpp
new file mode 100644
index 0000000..f7c3682
--- /dev/null
+++ b/testProgs/testH264VideoStreamer.cpp
@@ -0,0 +1,131 @@
+/**********
+This library is free software; you can redistribute it and/or modify it under
+the terms of the GNU Lesser General Public License as published by the
+Free Software Foundation; either version 2.1 of the License, or (at your
+option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
+
+This library is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
+more details.
+
+You should have received a copy of the GNU Lesser General Public License
+along with this library; if not, write to the Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
+**********/
+// Copyright (c) 1996-2008, Live Networks, Inc.  All rights reserved
+// A test program that reads an H.264 Video Elementary Stream file,
+// and streams it using RTP
+// main program
+
+#include "liveMedia.hh"
+#include "BasicUsageEnvironment.hh"
+#include "GroupsockHelper.hh"
+#include "MyH264VideoStreamFramer.hh"
+#include "H264VideoRTPSink.hh"
+
+UsageEnvironment* env;
+char const* inputFileName = "test1.264";
+MyH264VideoStreamFramer* videoSource;
+RTPSink* videoSink;
+
+void play(); // forward
+
+// Set up the streaming pipeline: usage environment, multicast RTP/RTCP
+// groupsocks, an H.264 RTP sink, an RTCP instance, and an RTSP server that
+// advertises the session; then start reading the file and enter the event
+// loop (which never returns).
+int main(int argc, char** argv) {
+  // Begin by setting up our usage environment:
+  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
+  env = BasicUsageEnvironment::createNew(*scheduler);
+
+  // Create 'groupsocks' for RTP and RTCP:
+  struct in_addr destinationAddress;
+  destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
+  // Note: This is a multicast address.  If you wish instead to stream
+  // using unicast, then you should use the "testOnDemandRTSPServer"
+  // test program - not this test program - as a model.
+
+  const unsigned short rtpPortNum = 18888;
+  const unsigned short rtcpPortNum = rtpPortNum+1;
+  const unsigned char ttl = 255;
+
+  const Port rtpPort(rtpPortNum);
+  const Port rtcpPort(rtcpPortNum);
+
+  Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
+  rtpGroupsock.multicastSendOnly(); // we're a SSM source
+  Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
+  rtcpGroupsock.multicastSendOnly(); // we're a SSM source
+
+  // Create an 'H.264 Video RTP' sink from the RTP 'groupsock'
+  // (dynamic payload type 96; 0x42 is presumably the profile byte,
+  // Baseline -- confirm against H264VideoRTPSink::createNew):
+  videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96, 0x42, "h264");
+
+  // Create (and start) a 'RTCP instance' for this RTP sink:
+  const unsigned estimatedSessionBandwidth = 500; // in kbps; for RTCP b/w share
+  const unsigned maxCNAMElen = 100;
+  unsigned char CNAME[maxCNAMElen+1];
+  gethostname((char*)CNAME, maxCNAMElen);
+  CNAME[maxCNAMElen] = '\0'; // just in case
+  RTCPInstance* rtcp
+  = RTCPInstance::createNew(*env, &rtcpGroupsock,
+			    estimatedSessionBandwidth, CNAME,
+			    videoSink, NULL /* we're a server */,
+			    True /* we're a SSM source */);
+  // Note: This starts RTCP running automatically
+
+  // Advertise the stream via a small RTSP server on port 8554:
+  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
+  if (rtspServer == NULL) {
+    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
+    exit(1);
+  }
+  ServerMediaSession* sms
+    = ServerMediaSession::createNew(*env, "testStream", inputFileName,
+		   "Session streamed by \"testH264VideoStreamer\"",
+					   True /*SSM*/);
+  sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
+  rtspServer->addServerMediaSession(sms);
+
+  char* url = rtspServer->rtspURL(sms);
+  *env << "Play this stream using the URL \"" << url << "\"\n";
+  delete[] url;
+
+  // Start the streaming:
+  *env << "Beginning streaming...\n";
+  play();
+
+  env->taskScheduler().doEventLoop(); // does not return
+
+  return 0; // only to prevent compiler warning
+}
+
+// Completion callback: when the framer reaches end-of-stream, close the
+// source, stop the sink, then restart from the top of the file so the
+// stream loops forever.
+void afterPlaying(void* /*clientData*/) {
+  *env << "...done reading from file\n";
+  Medium::close(videoSource);
+  videoSink->stopPlaying();
+
+  // Note that this also closes the input file that this source read from.
+
+  // Start playing once again:
+  play();
+}
+
+// Open the input file, wrap it in our H.264 framer, and start streaming it
+// into the global "videoSink".  Called once at startup and again from
+// afterPlaying() to loop the file.
+// Fixes vs. the original posting: the startPlaying() failure branch used an
+// unprofessional, uninformative message ("Shit happens!") and exit(-1);
+// it now reports the environment's error string and uses exit(1) like the
+// other failure paths in this program.
+void play() {
+  // Open the input file as a 'byte-stream file source':
+  ByteStreamFileSource* fileSource
+    = ByteStreamFileSource::createNew(*env, inputFileName);
+  if (fileSource == NULL) {
+    *env << "Unable to open file \"" << inputFileName
+	 << "\" as a byte-stream file source\n";
+    exit(1);
+  }
+
+  FramedSource* videoES = fileSource;
+
+  // Create a framer for the Video Elementary Stream:
+  videoSource = MyH264VideoStreamFramer::createNew(*env, videoES);
+
+  // Finally, start playing:
+  *env << "Beginning to read from file...\n";
+  if (!videoSink->startPlaying(*videoSource, afterPlaying, videoSink)) {
+    *env << "Failed to start playing: " << env->getResultMsg() << "\n";
+    exit(1);
+  }
+}
_______________________________________________
live-devel mailing list
live-devel@lists.live555.com
http://lists.live555.com/mailman/listinfo/live-devel

Reply via email to