The current patch (attached):

- Implements the concept of a "buffer" (meaning the encoded frames buffer, file 
based for us)
- Handles sending bufferFull/bufferEmpty events correctly (AFAICS)
- Is still non-threaded 
- Is still w/out sound
- Still only supports FLV

Tests and impressions are welcome.

--strk; 

 ()   ASCII Ribbon Campaign
 /\   Keep it simple! 

Index: server/asobj/NetStreamFfmpeg.cpp
===================================================================
RCS file: /sources/gnash/gnash/server/asobj/NetStreamFfmpeg.cpp,v
retrieving revision 1.133
diff -u -r1.133 NetStreamFfmpeg.cpp
--- server/asobj/NetStreamFfmpeg.cpp    22 May 2008 11:30:17 -0000      1.133
+++ server/asobj/NetStreamFfmpeg.cpp    24 May 2008 10:38:45 -0000
@@ -32,7 +32,7 @@
 #include "movie_root.h"
 #include "sound_handler.h"
 #include "VideoDecoderFfmpeg.h"
-#include "ClockTime.h" // TODO: use the VirtualClock instead ?
+#include "SystemClock.h"
 
 #include "FLVParser.h" 
 
@@ -48,24 +48,28 @@
 #endif
 
 /// Define this to add debugging prints for locking
-#define GNASH_DEBUG_THREADS
+//#define GNASH_DEBUG_THREADS
 
 // Define the following macro to have status notification handling debugged
 //#define GNASH_DEBUG_STATUS
 
+namespace {
+
 // Used to free data in the AVPackets we create our self
-static void avpacket_destruct(AVPacket* av)
+void avpacket_destruct(AVPacket* av)
 {
        delete [] av->data;
 }
 
+} // anonymous namespace
+
 
 namespace gnash {
 
 
-NetStreamFfmpeg::NetStreamFfmpeg():
+NetStreamFfmpeg::NetStreamFfmpeg()
+       :
 
-       _playback_state(PLAY_NONE),
        _decoding_state(DEC_NONE),
 
        m_video_index(-1),
@@ -82,9 +86,11 @@
 
        m_last_video_timestamp(0),
        m_last_audio_timestamp(0),
-       m_current_timestamp(0),
+
+       _playbackClock(new InterruptableVirtualClock(new SystemClock)),
+       _playHead(_playbackClock.get()), 
+
        m_unqueued_data(NULL),
-       m_time_of_pause(0),
 
        _decoderBuffer(0),
        _soundHandler(get_sound_handler())
@@ -106,23 +112,21 @@
 void NetStreamFfmpeg::pause( PauseMode mode )
 {
        log_debug("::pause(%d) called ", mode);
-  switch ( mode ) {
-    case pauseModeToggle:
-                       if ( playbackStatus() == PLAY_PAUSED ) {
-                         unpausePlayback();
-                       } else {
-                         pausePlayback();
-                       }
+       switch ( mode )
+       {
+               case pauseModeToggle:
+                       if ( _playHead.getState() == PlayHead::PLAY_PAUSED) 
unpausePlayback();
+                       else pausePlayback();
                        break;
-    case pauseModePause:
+               case pauseModePause:
                        pausePlayback();
                        break;
-    case pauseModeUnPause:
+               case pauseModeUnPause:
                        unpausePlayback();
                        break;
-    default:
+               default:
                        break;
-  }
+       }
 
 }
 
@@ -164,11 +168,6 @@
        delete m_unqueued_data;
        m_unqueued_data = NULL;
 
-       boost::mutex::scoped_lock lock(_qMutex);
-
-       m_qvideo.clear();
-       m_qaudio.clear();
-
        delete [] ByteIOCxt.buffer;
 
 }
@@ -226,12 +225,10 @@
 void
 NetStreamFfmpeg::play(const std::string& c_url)
 {
-
        // Is it already playing ?
-       if (playbackStatus() != PLAY_NONE && playbackStatus() != PLAY_STOPPED)
+       if ( m_parser.get() )
        {
-               log_error("NetStream.play() called already playing ?"); // 
TODO: fix this case
-               //unpausePlayback(); // will check for playbackStatus itself..
+               log_error("NetStream.play() called while already streaming ?"); 
// TODO: fix this case
                return;
        }
 
@@ -258,15 +255,13 @@
                return;
        }
 
-       //decodingStatus(DEC_BUFFERING);
-
        // We need to restart the audio
        if (_soundHandler)
                _soundHandler->attach_aux_streamer(audio_streamer, this);
 
        // This starts the decoding thread
-       _decodeThread = new 
boost::thread(boost::bind(NetStreamFfmpeg::av_streamer, this)); 
-       _decodeThreadBarrier.wait();
+       //_decodeThread = new 
boost::thread(boost::bind(NetStreamFfmpeg::av_streamer, this)); 
+       //_decodeThreadBarrier.wait();
 
        return;
 }
@@ -467,8 +462,6 @@
                m_video_index = 0;
                m_audio_index = 1;
 
-               m_start_onbuffer = true;
-
                // Allocate a frame to store the decoded frame in
                m_Frame = avcodec_alloc_frame();
        }
@@ -602,8 +595,16 @@
                }
        }
 
-       playbackStatus(PLAY_PLAYING);
-       m_start_clock = clocktime::getTicks();
+       _playHead.init(m_VCodecCtx!=0, false); // second arg should be 
m_ACodecCtx!=0, but we're testing video only for now
+       _playHead.setState(PlayHead::PLAY_PLAYING);
+
+       decodingStatus(DEC_BUFFERING);
+
+//#ifdef GNASH_DEBUG_STATUS
+       log_debug("Setting playStart status");
+//#endif
+       setStatus(playStart);
+
        return true;
 }
 
@@ -638,106 +639,14 @@
 
        ns->_decodeThreadBarrier.wait();
 
-       //assert (ns->m_ACodecCtx); // is only set if audio decoder could be 
initialized 
-       //assert (ns->m_VCodecCtx); // is only set if video decder could be 
initialized 
-       //assert (ns->m_FormatCtx); // is only set for non-flv
-
-       ns->setStatus(playStart);
-
-       ns->m_last_video_timestamp = 0;
-       ns->m_last_audio_timestamp = 0;
-       ns->m_current_timestamp = 0;
-
-       ns->m_start_clock = clocktime::getTicks();
-
-       ns->m_unqueued_data = NULL;
-
-       // Loop until killed
-       while ( ! ns->decodeThreadKillRequested() ) // locks _qMutex
+       // Parse in a thread...
+       abort(); // has to be fixed to use mutex against parser
+       // FIXME: 
+       while ( ! ns->m_parser->parsingCompleted()
+               && ! ns->decodeThreadKillRequested() ) 
        {
-               unsigned long int sleepTime = 1000;
-
-               {
-#ifdef GNASH_DEBUG_THREADS
-               log_debug("qMutex: waiting for lock in av_streamer");
-#endif
-               boost::mutex::scoped_lock lock(ns->_qMutex);
-#ifdef GNASH_DEBUG_THREADS
-               log_debug("qMutex: lock obtained in av_streamer");
-#endif
-
-               if ( ns->decodingStatus() == DEC_STOPPED )
-               {
-                       log_debug("Dec stopped (eof), waiting on qNeedRefill 
condition");
-                       ns->_qFillerResume.wait(lock);
-                       continue; // will release the lock for a moment
-               }
-
-#ifdef GNASH_DEBUG_THREADS
-               log_debug("Decoding iteration. bufferTime=%lu, bufferLen=%lu, 
videoFrames=%lu, audioFrames=%lu",
-                       ns->bufferTime(), ns->bufferLength(), 
ns->m_qvideo.size(), ns->m_qaudio.size());
-#endif
-
-               if (ns->m_isFLV)
-               {
-                       // If any of the two queues are full don't bother 
fetching more
-                       // (next consumer will wake us up)
-                       //
-                       if ( ns->m_qvideo.full() || ns->m_qaudio.full() )
-                       {
-                               ns->decodingStatus(DEC_DECODING); // that's to 
say: not buffering anymore
-
-                               // Instead wait till waked up by short-queues 
event
-                               log_debug("Queues full, waiting on qNeedRefill 
condition");
-                               ns->_qFillerResume.wait(lock);
-                       }
-                       else
-                       {
-                               log_debug("Calling decodeFLVFrame");
-                               bool successDecoding = ns->decodeFLVFrame();
-                               //log_debug("decodeFLVFrame returned %d", 
successDecoding);
-                               if ( ! successDecoding )
-                               {
-                                       // Possible failures:
-                                       // 1. could not decode frame... lot's 
of possible
-                                       //    reasons...
-                                       // 2. EOF reached
-                                       if ( ns->m_videoFrameFormat != 
render::NONE )
-                                       {
-                                               log_error("Could not decode FLV 
frame");
-                                       }
-                                       // else it's expected, we'll keep going 
anyway
-                               }
-
-                       }
-
-               }
-               else
-               {
-
-                       // If we have problems with decoding - break
-                       if (ns->decodeMediaFrame() == false && 
ns->m_start_onbuffer == false && ns->m_qvideo.size() == 0 && 
ns->m_qaudio.size() == 0)
-                       {
-                               break;
-                       }
-
-               }
-
-#ifdef GNASH_DEBUG_THREADS
-               log_debug("qMutex: releasing lock in av_streamer");
-#endif
-               }
-
-               //log_debug("Sleeping %d microseconds", sleepTime);
-               usleep(sleepTime); // Sleep 1ms to avoid busying the processor.
-
+               ns->m_parser->parseNextTag();
        }
-
-//#ifdef GNASH_DEBUG_THREADS
-       log_debug("Out of decoding loop. playbackStatus:%d, decodingStatus:%d", 
ns->playbackStatus(), ns->decodingStatus());
-//#endif
-       ns->decodingStatus(DEC_STOPPED);
-
 }
 
 // audio callback is running in sound handler thread
@@ -745,6 +654,10 @@
 {
        //GNASH_REPORT_FUNCTION;
 
+       return false;
+
+#if 0 // no audio for now, needs proper mutex design first (SDL sound handler 
runs in a thread)
+
        NetStreamFfmpeg* ns = static_cast<NetStreamFfmpeg*>(owner);
 
        PlaybackState pbStatus = ns->playbackStatus();
@@ -797,10 +710,100 @@
 #endif
        }
        return true;
+#endif
+}
+
+media::raw_mediadata_t*
+NetStreamFfmpeg::getDecodedVideoFrame(boost::uint32_t ts)
+{
+       if ( ! m_parser.get() )
+       {
+               log_error("getDecodedVideoFrame: no parser available");
+               return 0; // no parser, no party
+       }
+
+       FLVVideoFrameInfo* info = m_parser->peekNextVideoFrameInfo();
+       if ( ! info )
+       {
+               log_error("getDecodedVideoFrame(%d): no more video frames in 
input (peekNextVideoFrameInfo returned false)");
+               decodingStatus(DEC_STOPPED);
+               return 0;
+       }
+
+       if ( info->timestamp > ts )
+       {
+               log_error("getDecodedVideoFrame(%d): next video frame is in the 
future (%d)", ts, info->timestamp);
+               return 0; // next frame is in the future
+       }
+
+       // Loop until a good frame is found
+       media::raw_mediadata_t* video = 0;
+       while ( 1 )
+       {
+               video = decodeNextVideoFrame();
+               if ( ! video )
+               {
+                       log_error("peekNextVideoFrameInfo returned some info, "
+                               "but decodeNextVideoFrame returned null, "
+                               "I don't think this should ever happen");
+                       break;
+               }
+
+               FLVVideoFrameInfo* info = m_parser->peekNextVideoFrameInfo();
+               if ( ! info )
+               {
+                       // the one we decoded was the last one
+                       log_debug("last video frame decoded (should set 
playback status to STOP?)");
+                       break;
+               }
+               if ( info->timestamp > ts )
+               {
+                       // the next one is in the future, we'll return this one.
+                       log_debug("next video frame is in the future, we'll 
return this one");
+                       break; // the one we decoded
+               }
+       }
+
+       return video;
+}
+
+media::raw_mediadata_t*
+NetStreamFfmpeg::decodeNextVideoFrame()
+{
+       if ( ! m_parser.get() )
+       {
+               log_error("decodeNextVideoFrame: no parser available");
+               return 0; // no parser, no party
+       }
+
+       FLVFrame* frame = m_parser->nextVideoFrame(); 
+       if (frame == NULL)
+       {
+               log_debug("decodeNextVideoFrame: no more video frames in 
input");
+               return 0;
+       }
+       assert (frame->type == videoFrame);
+
+       AVPacket packet;
+
+       packet.destruct = avpacket_destruct; // needed ?
+       packet.size = frame->dataSize;
+       packet.data = frame->data;
+       // FIXME: is this the right value for packet.dts?
+       packet.pts = packet.dts = static_cast<boost::int64_t>(frame->timestamp);
+       assert (frame->type == videoFrame);
+       packet.stream_index = 0;
+
+       return decodeVideo(&packet);
 }
 
-bool NetStreamFfmpeg::decodeFLVFrame()
+bool
+NetStreamFfmpeg::decodeFLVFrame()
 {
+#if 1
+       abort();
+       return false;
+#else
        FLVFrame* frame = m_parser->nextMediaFrame(); // we don't care which 
one, do we ?
 
        if (frame == NULL)
@@ -822,21 +825,40 @@
        if (frame->type == videoFrame)
        {
                packet.stream_index = 0;
-               return decodeVideo(&packet);
+               media::raw_mediadata_t* video = decodeVideo(&packet);
+               assert (m_isFLV);
+               if (video)
+               {
+                       // NOTE: Caller is assumed to have locked _qMutex 
already
+                       if ( ! m_qvideo.push(video) )
+                       {
+                               log_error("Video queue full !");
+                       }
+               }
        }
        else
        {
                assert(frame->type == audioFrame);
                packet.stream_index = 1;
-               return decodeAudio(&packet);
+               media::raw_mediadata_t* audio = decodeAudio(&packet);
+               if ( audio )
+               {
+                       if ( ! m_qaudio.push(audio) )
+                       {
+                               log_error("Audio queue full!");
+                       }
+               }
        }
 
+       return true;
+#endif
 }
 
 
-bool NetStreamFfmpeg::decodeAudio( AVPacket* packet )
+media::raw_mediadata_t* 
+NetStreamFfmpeg::decodeAudio( AVPacket* packet )
 {
-       if (!m_ACodecCtx) return false;
+       if (!m_ACodecCtx) return 0;
 
        int frame_size;
        //static const unsigned int bufsize = (AVCODEC_MAX_AUDIO_FRAME_SIZE * 
3) / 2;
@@ -944,26 +966,20 @@
 
                m_last_audio_timestamp += frame_delay;
 
-               if (m_isFLV)
-               {
-                       if ( ! m_qaudio.push(raw) )
-                       {
-                               log_error("Audio queue full!");
-                       }
-               }
-               else m_unqueued_data = m_qaudio.push(raw) ? NULL : raw;
+               return raw;
        }
-       return true;
+       return 0;
 }
 
 
-bool NetStreamFfmpeg::decodeVideo(AVPacket* packet)
+media::raw_mediadata_t* 
+NetStreamFfmpeg::decodeVideo(AVPacket* packet)
 {
-       if (!m_VCodecCtx) return false;
+       if (!m_VCodecCtx) return NULL;
 
        int got = 0;
        avcodec_decode_video(m_VCodecCtx, m_Frame, &got, packet->data, 
packet->size);
-       if (!got) return false;
+       if (!got) return NULL;
 
        // This tmpImage is really only used to compute proper size of the 
video data...
        // stupid isn't it ?
@@ -982,7 +998,7 @@
        if (m_videoFrameFormat == render::NONE)
        {
                // NullGui?
-               return false;
+               return NULL;
 
        }
        else if (m_videoFrameFormat == render::YUV && m_VCodecCtx->pix_fmt != 
PIX_FMT_YUV420P)
@@ -997,7 +1013,7 @@
                rgbpicture = 
media::VideoDecoderFfmpeg::convertRGB24(m_VCodecCtx, *m_Frame);
                if (!rgbpicture.data[0])
                {
-                       return false;
+                       return NULL;
                }
        }
 
@@ -1088,21 +1104,14 @@
 
        }
 
-       // NOTE: Caller is assumed to have locked _qMutex already
-       if (m_isFLV)
-       {
-               if ( ! m_qvideo.push(video) )
-               {
-                       log_error("Video queue full !");
-               }
-       }
-       else m_unqueued_data = m_qvideo.push(video) ? NULL : video;
-
-       return true;
+       return video;
 }
 
 bool NetStreamFfmpeg::decodeMediaFrame()
 {
+       return false;
+
+#if 0 // Only FLV for now (non-FLV should be threated the same as FLV, using a 
MediaParser in place of the FLVParser)
 
        if (m_unqueued_data)
        {
@@ -1132,20 +1141,24 @@
        {
                if (packet.stream_index == m_audio_index && _soundHandler)
                {
-                       if (!decodeAudio(&packet)) 
+                       media::raw_mediadata_t* audio = decodeAudio(&packet);
+                       if (!audio)
                        {
                                log_error(_("Problems decoding audio frame"));
                                return false;
                        }
+                       m_unqueued_data = m_qaudio.push(audio) ? NULL : audio;
                }
                else
                if (packet.stream_index == m_video_index)
                {
-                       if (!decodeVideo(&packet)) 
+                       media::raw_mediadata_t* video = decodeVideo(&packet);
+                       if (!video)
                        {
                                log_error(_("Problems decoding video frame"));
                                return false;
                        }
+                       m_unqueued_data = m_qvideo.push(video) ? NULL : video;
                }
                av_free_packet(&packet);
        }
@@ -1156,15 +1169,24 @@
        }
 
        return true;
+#endif
 }
 
 void
-NetStreamFfmpeg::seek(boost::uint32_t pos)
+NetStreamFfmpeg::seek(boost::uint32_t posSeconds)
 {
        GNASH_REPORT_FUNCTION;
 
-       // We'll mess with the queues here
-       boost::mutex::scoped_lock lock(_qMutex);
+       // We'll mess with the input here
+       if ( ! m_parser.get() )
+       {
+               log_debug("NetStreamFfmpeg::seek(%d): no parser, no party", 
posSeconds);
+               return;
+       }
+
+       // Don't ask me why, but NetStream::seek() takes seconds...
+       boost::uint32_t pos = posSeconds*1000;
+
 
        long newpos = 0;
        double timebase = 0;
@@ -1172,18 +1194,11 @@
        // Seek to new position
        if (m_isFLV)
        {
-               if (m_parser.get())
-               {
-                       newpos = m_parser->seek(pos);
-               }
-               else
-               {
-                       newpos = 0;
-               }
+               newpos = m_parser->seek(pos);
+               log_debug("m_parser->seek(%d) returned %d", pos, newpos);
        }
        else if (m_FormatCtx)
        {
-
                AVStream* videostream = m_FormatCtx->streams[m_video_index];
                timebase = static_cast<double>(videostream->time_base.num / 
videostream->time_base.den);
                newpos = static_cast<long>(pos / timebase);
@@ -1205,20 +1220,11 @@
        {
                m_last_video_timestamp = 0;
                m_last_audio_timestamp = 0;
-               m_current_timestamp = 0;
-
-               m_start_clock = clocktime::getTicks();
-
        }
        else if (m_isFLV)
        {
-
-               if (m_VCodecCtx) m_start_clock += m_last_video_timestamp - 
newpos;
-               else m_start_clock += m_last_audio_timestamp - newpos;
-
                if (m_ACodecCtx) m_last_audio_timestamp = newpos;
                if (m_VCodecCtx) m_last_video_timestamp = newpos;
-               m_current_timestamp = newpos;
        }
        else
        {
@@ -1240,146 +1246,159 @@
                av_free_packet( &Packet );
 
                av_seek_frame(m_FormatCtx, m_video_index, newpos, 0);
-               boost::uint32_t newtime_ms = 
static_cast<boost::int32_t>(newtime / 1000.0);
-               m_start_clock += m_last_audio_timestamp - newtime_ms;
+               newpos = static_cast<boost::int32_t>(newtime / 1000.0);
 
-               m_last_audio_timestamp = newtime_ms;
-               m_last_video_timestamp = newtime_ms;
-               m_current_timestamp = newtime_ms;
+               m_last_audio_timestamp = newpos;
+               m_last_video_timestamp = newpos;
        }
        
-       // Flush the queues
-       m_qvideo.clear();
-       m_qaudio.clear();
+       // 'newpos' will always be on a keyframe (supposedly)
+       _playHead.seekTo(newpos);
 
-       decodingStatus(DEC_DECODING); // or ::refreshVideoFrame will send a 
STOPPED again
-       if ( playbackStatus() == PLAY_STOPPED )
-       {
-               // restart playback (if not paused)
-               playbackStatus(PLAY_PLAYING);
-       }
+       decodingStatus(DEC_BUFFERING); // make sure we have enough things in 
buffer
        _qFillerResume.notify_all(); // wake it decoder is sleeping
        
+       refreshVideoFrame(true);
 }
 
 void
-NetStreamFfmpeg::refreshVideoFrame()
+NetStreamFfmpeg::parseNextChunk()
 {
-#ifdef GNASH_DEBUG_THREADS
-       log_debug("qMutex: waiting for lock in refreshVideoFrame");
-#endif
-       boost::mutex::scoped_lock lock(_qMutex);
-#ifdef GNASH_DEBUG_THREADS
-       log_debug("qMutex: lock obtained in refreshVideoFrame");
-#endif
+       // TODO: parse as much as possible w/out blocking
+       //       (will always block currently..)
+       const int tagsPerChunk = 2;
+       for (int i=0; i<tagsPerChunk; ++i)
+               m_parser->parseNextTag();
+}
 
-       // If we're paused (and we got the first imageframe), there is no need 
to do this
-       if (playbackStatus() == PLAY_PAUSED && m_imageframe)
+void
+NetStreamFfmpeg::refreshVideoFrame(bool alsoIfPaused)
+{
+
+       if ( ! m_parser.get() )
        {
-               log_debug("refreshVideoFrame doing nothing as playback is 
paused and we have an image frame already");
-#ifdef GNASH_DEBUG_THREADS
-               log_debug("qMutex: releasing lock in refreshVideoFrame");
-#endif
+               log_debug("%p.refreshVideoFrame: no parser, no party", this);
                return;
        }
 
-       // Loop until a good frame is found
-       do
+       if ( decodingStatus() == DEC_DECODING && bufferLength() == 0)
        {
-               // Get video frame from queue, will have the lowest timestamp
-               // will return NULL if empty(). See multithread_queue::front
-               media::raw_mediadata_t* video = m_qvideo.front();
-
-               // If the queue is empty either we're waiting for more data
-               // to be decoded or we're out of data
-               if (!video)
+               if ( ! m_parser->parsingCompleted() )
                {
-                       log_debug("refreshVideoFrame:: No more video frames in 
queue");
-
-                       if ( decodingStatus() == DEC_STOPPED )
-                       {
-                               if ( playbackStatus() != PLAY_STOPPED )
-                               {
-                                       playbackStatus(PLAY_STOPPED);
-//#ifdef GNASH_DEBUG_STATUS
-                                       log_debug("Setting playStop status");
-//#endif
-                                       setStatus(playStop);
-                               }
-                       }
-                       else
-                       {
-                               // There no video but decoder is still running
-                               // not much to do here except wait for next call
-                               //assert(decodingStatus() == DEC_BUFFERING);
-                       }
-
-                       break;
+                       log_debug("%p.refreshVideoFrame: buffer empty while 
decoding,"
+                               " setting buffer to buffering and pausing 
playback clock",
+                               this);
+                       setStatus(bufferEmpty);
+                       decodingStatus(DEC_BUFFERING);
+                       _playbackClock->pause();
                }
-
-               // Caclulate the current time
-               boost::uint32_t current_clock;
-               if (m_ACodecCtx && _soundHandler)
+               else
                {
-                       current_clock = m_current_timestamp;
+                       // set playStop ? (will be done later for now)
                }
-               else
+       }
+
+       if ( decodingStatus() == DEC_BUFFERING )
+       {
+               if ( bufferLength() < m_bufferTime )
                {
-                       current_clock = clocktime::getTicks() - m_start_clock;
-                       m_current_timestamp = current_clock;
+                       log_debug("%p.refreshVideoFrame: buffering"
+                               " - position=%d, buffer=%d/%d",
+                               this, _playHead.getPosition(), bufferLength(), 
m_bufferTime);
+                       return;
                }
+               log_debug("%p.refreshVideoFrame: buffer full, resuming playback 
clock"
+                       " - position=%d, buffer=%d/%d",
+                       this, _playHead.getPosition(), bufferLength(), 
m_bufferTime);
+               setStatus(bufferFull);
+               decodingStatus(DEC_DECODING);
+               _playbackClock->resume();
+       }
 
-               boost::uint32_t video_clock = video->m_pts;
+       if ( ! alsoIfPaused && _playHead.getState() == PlayHead::PLAY_PAUSED )
+       {
+               log_debug("%p.refreshVideoFrame: doing nothing as playhead is 
paused - "
+                       "bufferLength=%d, bufferTime=%d",
+                       this, bufferLength(), m_bufferTime);
+               return;
+       }
 
-               // If the timestamp on the videoframe is smaller than the
-               // current time, we put it in the output image.
-               if (current_clock >= video_clock)
-               {
+       if ( _playHead.isVideoConsumed() ) 
+       {
+               log_debug("%p.refreshVideoFrame: doing nothing "
+                       "as current position was already decoded - "
+                       "bufferLength=%d, bufferTime=%d",
+                       this, bufferLength(), m_bufferTime);
+               return;
+       }
 
-                       if (m_videoFrameFormat == render::YUV)
-                       {
-                               if ( ! m_imageframe ) m_imageframe  = new 
image::yuv(m_VCodecCtx->width, m_VCodecCtx->height);
-                               // XXX m_imageframe might be a byte aligned 
buffer, while video is not!
-                               
static_cast<image::yuv*>(m_imageframe)->update(video->m_data);
-                       }
-                       else if (m_videoFrameFormat == render::RGB)
-                       {
-                               if ( ! m_imageframe ) m_imageframe  = new 
image::rgb(m_VCodecCtx->width, m_VCodecCtx->height);
-                               image::rgb* imgframe = 
static_cast<image::rgb*>(m_imageframe);
-                               rgbcopy(imgframe, video, m_VCodecCtx->width * 
3);
-                       }
+       // Caclulate the current time
+       boost::uint64_t curPos = _playHead.getPosition();
 
-                       // Delete the frame from the queue
-                       m_qvideo.pop();
-                       delete video;
+       log_debug("%p.refreshVideoFrame: currentPosition=%d, playHeadState=%d, 
bufferLength=%d, bufferTime=%d",
+               this, curPos, _playHead.getState(), bufferLength(), 
m_bufferTime);
 
-                       // wake up filler (TODO: do only if decoder is running)
-                       // TODO2: resume only at end of loop ?
-                       _qFillerResume.notify_all();
 
-                       // A frame is ready for pickup
-                       m_newFrameReady = true;
+       // Get next decoded video frame from parser, will have the lowest 
timestamp
+       media::raw_mediadata_t* video = getDecodedVideoFrame(curPos);
 
+       // to be decoded or we're out of data
+       if (!video)
+       {
+               if ( decodingStatus() == DEC_STOPPED )
+               {
+                       log_debug("%p.refreshVideoFrame(): no more video frames 
to decode, sending STOP event", this);
+//#ifdef GNASH_DEBUG_STATUS
+                       log_debug("Setting playStop status");
+//#endif
+                       setStatus(playStop);
                }
                else
                {
-                       // The timestamp on the first frame in the queue is 
greater
-                       // than the current time, so no need to do anything.
-                       break;
+                       log_debug("%p.refreshVideoFrame(): last video frame was 
good enough for current position", this);
+                       // There no video but decoder is still running
+                       // not much to do here except wait for next call
+                       //assert(decodingStatus() == DEC_BUFFERING);
                }
 
-       } while(!m_qvideo.empty());
+       }
+       else
+       {
+
+               if (m_videoFrameFormat == render::YUV)
+               {
+                       if ( ! m_imageframe ) m_imageframe  = new 
image::yuv(m_VCodecCtx->width, m_VCodecCtx->height);
+                       // XXX m_imageframe might be a byte aligned buffer, 
while video is not!
+                       
static_cast<image::yuv*>(m_imageframe)->update(video->m_data);
+               }
+               else if (m_videoFrameFormat == render::RGB)
+               {
+                       if ( ! m_imageframe ) m_imageframe  = new 
image::rgb(m_VCodecCtx->width, m_VCodecCtx->height);
+                       image::rgb* imgframe = 
static_cast<image::rgb*>(m_imageframe);
+                       rgbcopy(imgframe, video, m_VCodecCtx->width * 3);
+               }
+
+               // Delete the frame from the queue
+               delete video;
+
+               // A frame is ready for pickup
+               m_newFrameReady = true;
+       }
+
+       // We consumed video of current position, feel free to advance if needed
+       _playHead.setVideoConsumed();
+
 
-#ifdef GNASH_DEBUG_THREADS
-       log_debug("qMutex: releasing lock in refreshVideoFrame");
-#endif
 }
 
 
 void
 NetStreamFfmpeg::advance()
 {
-       //log_debug("advance");
+       log_debug("%p.advance : bufferLength=%d, bufferTime=%d",
+               this, bufferLength(), m_bufferTime);
+
+       if ( m_parser.get() ) parseNextChunk(); 
 
        // Check if there are any new status messages, and if we should
        // pass them to a event handler
@@ -1388,109 +1407,83 @@
        // Find video frame with the most suited timestamp in the video queue,
        // and put it in the output image frame.
        refreshVideoFrame();
+
+       // Refill audio buffer to consume all samples
+       // up to current playhead
+       //refreshAudioBuffer();
 }
 
 boost::int32_t
 NetStreamFfmpeg::time()
 {
-
-       if (m_FormatCtx && m_FormatCtx->nb_streams > 0)
-       {
-               double time = (double)m_FormatCtx->streams[0]->time_base.num / 
(double)m_FormatCtx->streams[0]->time_base.den * 
(double)m_FormatCtx->streams[0]->cur_dts;
-               return static_cast<boost::int32_t>(time);
-       }
-       else if
-       (m_isFLV)
-       {
-               return m_current_timestamp;
-       }
-       else
-       {
-               return 0;
-       }
+       return _playHead.getPosition();
 }
 
 void NetStreamFfmpeg::pausePlayback()
 {
        GNASH_REPORT_FUNCTION;
 
-       if (playbackStatus() == PLAY_PAUSED) return;
-
-       playbackStatus(PLAY_PAUSED);
-
-       // Save the current time so we later can tell how long the pause lasted
-       m_time_of_pause = clocktime::getTicks();
+       PlayHead::PlaybackStatus oldStatus = 
_playHead.setState(PlayHead::PLAY_PAUSED);
 
-       // Disconnect the soundhandler so we don't play while paused
-       if ( _soundHandler ) _soundHandler->detach_aux_streamer((void*)this);
+       // Disconnect the soundhandler if we were playing before
+       if ( oldStatus == PlayHead::PLAY_PLAYING && _soundHandler )
+       {
+               _soundHandler->detach_aux_streamer((void*)this);
+       }
 }
 
 void NetStreamFfmpeg::unpausePlayback()
 {
        GNASH_REPORT_FUNCTION;
 
-       if (playbackStatus() == PLAY_PLAYING) // already playing
-       {
-               log_debug("unpausePlayback: already playing");
-               return;
-       }
-
-       playbackStatus(PLAY_PLAYING);
+       PlayHead::PlaybackStatus oldStatus = 
_playHead.setState(PlayHead::PLAY_PLAYING);
 
-       if (m_current_timestamp == 0)
+       // Re-connect to the soundhandler if we were paused before
+       if ( oldStatus == PlayHead::PLAY_PAUSED && _soundHandler )
        {
-               m_start_clock = clocktime::getTicks();
+               _soundHandler->attach_aux_streamer(audio_streamer, (void*) 
this);
        }
-       else
-       {
-               // Add the paused time to the start time so that the playhead 
doesn't
-               // noticed that we have been paused
-               m_start_clock += clocktime::getTicks() - m_time_of_pause;
-       }
-
-       // (re)-connect to the soundhandler.
-       // It was disconnected in ::pausePlayback to avoid to keep playing 
sound while paused
-       if ( _soundHandler ) _soundHandler->attach_aux_streamer(audio_streamer, 
(void*) this);
 }
 
 
 long
 NetStreamFfmpeg::bytesLoaded ()
 {
-       long ret_val = 0;
-
-       if ( _netCon ) 
+       if ( ! m_parser.get() )
        {
-               ret_val = _netCon->getBytesLoaded();
+               log_debug("bytesLoaded: no parser, no party");
+               return 0;
        }
 
-       return ret_val;
+       return m_parser->getBytesLoaded();
 }
 
-
 long
-NetStreamFfmpeg::bytesTotal ()
+NetStreamFfmpeg::bufferLength ()
 {
-       long ret_val = 0;
-
-       if ( _netCon ) 
+       if ( ! m_parser.get() )
        {
-               ret_val = _netCon->getBytesTotal();
+               log_debug("bufferLength: no parser, no party");
+               return 0;
        }
 
-       return ret_val;
+       boost::uint32_t maxTimeInBuffer = m_parser->getBufferLength();
+       boost::uint64_t curPos = _playHead.getPosition();
+
+       if ( maxTimeInBuffer < curPos ) return 0;
+       return maxTimeInBuffer-curPos;
 }
 
-NetStreamFfmpeg::PlaybackState
-NetStreamFfmpeg::playbackStatus(PlaybackState newstate)
+long
+NetStreamFfmpeg::bytesTotal ()
 {
-       boost::mutex::scoped_lock lock(_state_mutex);
-
-       if (newstate != PLAY_NONE) {
-               _playback_state = newstate;
-       }
+       if ( ! m_parser.get() )
+       {
+               log_debug("bytesTotal: no parser, no party");
+               return 0;
+       }
 
-       return _playback_state;
+       return m_parser->getBytesTotal();
 }
 
 NetStreamFfmpeg::DecodingState
@@ -1511,14 +1504,6 @@
        GNASH_REPORT_FUNCTION;
 
        {
-#ifdef GNASH_DEBUG_THREADS
-               log_debug("qMutex: waiting for lock in killDecodeThread");
-#endif
-               boost::mutex::scoped_lock lock(_qMutex);
-#ifdef GNASH_DEBUG_THREADS
-               log_debug("qMutex: lock obtained in killDecodeThread");
-#endif
-
                _qFillerKillRequest = true;
                _qFillerResume.notify_all(); // wake it up if waiting..
        }
@@ -1536,7 +1521,6 @@
 bool
 NetStreamFfmpeg::decodeThreadKillRequested()
 {
-       boost::mutex::scoped_lock lock(_qMutex);
        return _qFillerKillRequest;
 }
 
Index: server/asobj/NetStreamFfmpeg.h
===================================================================
RCS file: /sources/gnash/gnash/server/asobj/NetStreamFfmpeg.h,v
retrieving revision 1.68
diff -u -r1.68 NetStreamFfmpeg.h
--- server/asobj/NetStreamFfmpeg.h      23 May 2008 05:58:09 -0000      1.68
+++ server/asobj/NetStreamFfmpeg.h      24 May 2008 10:38:45 -0000
@@ -36,6 +36,9 @@
 #include <boost/thread/condition.hpp>
 #include <boost/thread/barrier.hpp>
 
+#include <memory>
+#include <cassert>
+
 #include "impl.h"
 
 #ifdef HAVE_FFMPEG_AVFORMAT_H
@@ -53,6 +56,7 @@
 #include "image.h"
 #include "StreamProvider.h"    
 #include "NetStream.h" // for inheritance
+#include "VirtualClock.h"
 
 #include "ffmpegNetStreamUtil.h"
 
@@ -109,6 +113,7 @@
 
        long bytesTotal();
 
+       long bufferLength();
 private:
 
        enum PlaybackState {
@@ -125,7 +130,6 @@
                DEC_BUFFERING,
        };
 
-       PlaybackState _playback_state;
        DecodingState _decoding_state;
 
        // Mutex protecting _playback_state and _decoding_state
@@ -163,7 +167,11 @@
        /// is that  refreshVideoFrame() is called right before get_video(). 
This is important
        /// to ensure timing is correct..
        ///
-       void refreshVideoFrame();
+       /// @param alsoIfPaused
+       ///     If true, video is consumed/refreshed even if playhead is paused.
+       ///     By default this is false, but will be used on ::seek 
(user-requested)
+       ///
+       void refreshVideoFrame(bool alsoIfPaused=false);
 
        // Used to decode and push the next available (non-FLV) frame to the 
audio or video queue
        bool decodeMediaFrame();
@@ -197,35 +205,45 @@
        ///
        bool decodeFLVFrame();
 
-       /// Used to decode a video frame and push it on the videoqueue
+       /// Decode the next video frame, fetching it from the MediaParser cursor
+       //
+       /// @return 0 on EOF or error, a decoded video otherwise
+       ///
+       media::raw_mediadata_t* decodeNextVideoFrame();
+
+       /// Decode input frames up to the one with timestamp <= ts.
        //
-       /// Also updates m_imageframe (why !??)
+       /// Decoding starts from "next" element in the parser cursor.
        ///
+       /// Return 0 if:
+       ///     1. there's no parser active.
+       ///     2. parser cursor is already on last frame.
+       ///     3. next element in cursor has timestamp > ts
+       ///     4. there was an error decoding
+       ///
+       media::raw_mediadata_t* getDecodedVideoFrame(boost::uint32_t ts);
+
+       /// Used to decode a video frame 
+       //
        /// This is a blocking call.
-       /// If no Video decoding context exists (m_VCodecCtx), false is 
returned.
-       /// On decoding (or converting) error, false is returned.
-       /// If renderer requested video format is render::NONE, false is 
returned.
-       /// In any other case, true is returned.
+       /// If no Video decoding context exists (m_VCodecCtx), 0 is returned.
+       /// On decoding (or converting) error, 0 is returned.
+       /// If renderer requested video format is render::NONE, 0 is returned.
+       /// In any other case, a decoded video frame is returned.
        ///
-       /// NOTE: (FIXME) if video queue is full, 
-       ///       we'd still return true w/out pushing anything new there
-       /// 
        /// TODO: return a more informative value to tell what happened.
        ///
-       bool decodeVideo( AVPacket* packet );
+       media::raw_mediadata_t* decodeVideo( AVPacket* packet );
 
-       /// Used to decode a audio frame and push it on the audioqueue
+       /// Used to decode an audio frame 
        //
        /// This is a blocking call.
-       /// If no Video decoding context exists (m_ACodecCtx), false is 
returned.
-       /// In any other case, true is returned.
+       /// If no Audio decoding context exists (m_ACodecCtx), 0 is returned.
+       /// In any other case, a decoded audio frame is returned.
        ///
-       /// NOTE: (FIXME) if audio queue is full,
-       ///       we'd still return true w/out pushing anything new there
-       /// 
        /// TODO: return a more informative value to tell what happened.
        ///
-       bool decodeAudio( AVPacket* packet );
+       media::raw_mediadata_t* decodeAudio( AVPacket* packet );
 
        // Used to calculate a decimal value from a ffmpeg fraction
        inline double as_double(AVRational time)
@@ -233,7 +251,6 @@
                return time.num / (double) time.den;
        }
 
-       PlaybackState playbackStatus(PlaybackState newstate = PLAY_NONE);
        DecodingState decodingStatus(DecodingState newstate = DEC_NONE);
 
        int m_video_index;
@@ -286,24 +303,14 @@
        // The timestamp of the last decoded audio frame, in seconds.
        volatile boost::uint32_t m_last_audio_timestamp;
 
-       // The timestamp of the last played audio (default) or video (if no 
audio) frame.
-       // Misured in seconds.
-       boost::uint32_t m_current_timestamp;
-
-       /// The queues of audio and video data.
-       typedef media::ElementsOwningQueue<media::raw_mediadata_t*> MediaQueue;
-
-       MediaQueue m_qaudio;
-       MediaQueue m_qvideo;
-
-       /// Mutex protecting access to queues
-       boost::mutex _qMutex;
-
        /// Queues filler will wait on this condition when queues are full
        boost::condition _qFillerResume;
 
-       // The time we started playing in seconds (since VM start ?)
-       volatile boost::uint64_t m_start_clock;
+       /// Virtual clock used as playback clock source
+       std::auto_ptr<InterruptableVirtualClock> _playbackClock;
+
+       /// Playback control device 
+       PlayHead _playHead;
 
        // When the queues are full, this is where we keep the audio/video frame
        // there wasn't room for on its queue
@@ -311,14 +318,16 @@
 
        ByteIOContext ByteIOCxt;
 
-       // Time of when pause started, in seconds since VM started
-       volatile boost::uint64_t m_time_of_pause;
-
        // Decoder buffer
        boost::uint8_t* _decoderBuffer;
 
        // Current sound handler
        media::sound_handler* _soundHandler;
+
+       /// Parse a chunk of input
+       /// Currently blocks, ideally should parse as much
+       /// as possible w/out blocking
+       void parseNextChunk();
 };
 
 
_______________________________________________
Gnash-dev mailing list
[email protected]
http://lists.gnu.org/mailman/listinfo/gnash-dev

Reply via email to